Posted to commits@knox.apache.org by lm...@apache.org on 2017/10/26 14:22:37 UTC

[01/37] knox git commit: KNOX-1052 - Add some tests for the Knox SSO Service

Repository: knox
Updated Branches:
  refs/heads/KNOX-1049 f5490414d -> 1ee937071


KNOX-1052 - Add some tests for the Knox SSO Service


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2666894b
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2666894b
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2666894b

Branch: refs/heads/KNOX-1049
Commit: 2666894bc84281ed78890110ab15b009fa5f2830
Parents: a5a8825
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Sep 20 11:09:54 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Sep 20 11:09:54 2017 +0100

----------------------------------------------------------------------
 gateway-service-knoxsso/pom.xml                 |  11 +-
 .../gateway/service/knoxsso/WebSSOResource.java |  20 +-
 .../service/knoxsso/WebSSOResourceTest.java     | 304 ++++++++++++++++++-
 3 files changed, 308 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/2666894b/gateway-service-knoxsso/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/pom.xml b/gateway-service-knoxsso/pom.xml
index f5018a2..e6b6ca5 100644
--- a/gateway-service-knoxsso/pom.xml
+++ b/gateway-service-knoxsso/pom.xml
@@ -59,9 +59,10 @@
       <artifactId>gateway-test-utils</artifactId>
       <scope>test</scope>
     </dependency>
-      <dependency>
-          <groupId>org.easymock</groupId>
-          <artifactId>easymock</artifactId>
-          <scope>test</scope>
-      </dependency>  </dependencies>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/knox/blob/2666894b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
index 7cc5378..0d9e6dd 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
@@ -23,6 +23,7 @@ import java.net.URISyntaxException;
 import java.security.Principal;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
@@ -74,14 +75,14 @@ public class WebSSOResource {
   private long tokenTTL = 30000l;
   private String whitelist = null;
   private String domainSuffix = null;
-  private String[] targetAudiences = null;
+  private List<String> targetAudiences = new ArrayList<>();
   private boolean enableSession = false;
 
   @Context
-  private HttpServletRequest request;
+  HttpServletRequest request;
 
   @Context
-  private HttpServletResponse response;
+  HttpServletResponse response;
 
   @Context
   ServletContext context;
@@ -124,7 +125,10 @@ public class WebSSOResource {
 
     String audiences = context.getInitParameter(SSO_COOKIE_TOKEN_AUDIENCES_PARAM);
     if (audiences != null) {
-      targetAudiences = audiences.split(",");
+      String[] auds = audiences.split(",");
+      for (int i = 0; i < auds.length; i++) {
+        targetAudiences.add(auds[i]);
+      }
     }
 
     String ttl = context.getInitParameter(SSO_COOKIE_TOKEN_TTL_PARAM);
@@ -180,14 +184,10 @@ public class WebSSOResource {
 
     try {
       JWT token = null;
-      if (targetAudiences == null || targetAudiences.length == 0) {
+      if (targetAudiences.isEmpty()) {
         token = ts.issueToken(p, "RS256", getExpiry());
       } else {
-        ArrayList<String> aud = new ArrayList<String>();
-        for (int i = 0; i < targetAudiences.length; i++) {
-          aud.add(targetAudiences[i]);
-        }
-        token = ts.issueToken(p, aud, "RS256", getExpiry());
+        token = ts.issueToken(p, targetAudiences, "RS256", getExpiry());
       }
 
       // Coverity CID 1327959
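
A side note on the audiences parsing introduced above: String.split(",") does not
trim whitespace, so a configured value of "recipient1, recipient2" yields the
audience " recipient2" with a leading space. A minimal trimming variant, shown as
an illustration only (this is not part of the commit):

    import java.util.ArrayList;
    import java.util.List;

    // Illustrative sketch: audiences parsing with trimming (not Knox code).
    public class AudienceParseSketch {
        static List<String> parseAudiences(String audiences) {
            List<String> result = new ArrayList<>();
            if (audiences != null) {
                for (String aud : audiences.split(",")) {
                    result.add(aud.trim()); // trim() avoids whitespace-polluted audience claims
                }
            }
            return result;
        }

        public static void main(String[] args) {
            System.out.println(parseAudiences("recipient1, recipient2")); // [recipient1, recipient2]
        }
    }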

http://git-wip-us.apache.org/repos/asf/knox/blob/2666894b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index 73910dd..c953c91 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -17,15 +17,65 @@
  */
 package org.apache.hadoop.gateway.service.knoxsso;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.Principal;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
+import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
+import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.apache.hadoop.gateway.util.RegExUtils;
+import org.easymock.EasyMock;
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
 /**
- *
+ * Some tests for the Knox SSO service.
  */
 public class WebSSOResourceTest {
 
+  protected static RSAPublicKey publicKey;
+  protected static RSAPrivateKey privateKey;
+
+  @BeforeClass
+  public static void setup() throws Exception, NoSuchAlgorithmException {
+    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+    kpg.initialize(1024);
+    KeyPair KPair = kpg.generateKeyPair();
+
+    publicKey = (RSAPublicKey) KPair.getPublic();
+    privateKey = (RSAPrivateKey) KPair.getPrivate();
+  }
+
   @Test
   public void testWhitelistMatching() throws Exception {
     String whitelist = "^https?://.*example.com:8080/.*$;" +
@@ -35,37 +85,267 @@ public class WebSSOResourceTest {
         "^https?://localhost:\\d{0,9}/.*$;^/.*$";
 
     // match on explicit hostname/domain and port
-    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
         "http://host.example.com:8080/"));
     // match on non-required port
-    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
         "http://host.example.com/"));
     // match on required but any port
-    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
         "http://host.example2.com:1234/"));
     // fail on missing port
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example2.com/"));
     // fail on invalid port
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example.com:8081/"));
     // fail on alphanumeric port
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example.com:A080/"));
     // fail on invalid hostname/domain
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example.net:8080/"));
     // fail on required port
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example2.com/"));
     // fail on required https
-    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
         "http://host.example3.com/"));
     // match on localhost and port
-    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
         "http://localhost:8080/"));
     // match on local/relative path
-    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist, 
+    Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
         "/local/resource/"));
   }
+
+  @Test
+  public void testGetToken() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+    EasyMock.replay(principal, services, context, request);
+
+    WebSSOResource webSSOResponse = new WebSSOResource();
+    webSSOResponse.request = request;
+    webSSOResponse.response = responseWrapper;
+    webSSOResponse.context = context;
+    webSSOResponse.init();
+
+    // Issue a token
+    webSSOResponse.doGet();
+
+    // Check the cookie
+    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+    assertNotNull(cookie);
+
+    JWTToken parsedToken = new JWTToken(cookie.getValue());
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+  }
+
+  @Test
+  public void testAudiences() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn("recipient1,recipient2");
+    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+    EasyMock.replay(principal, services, context, request);
+
+    WebSSOResource webSSOResponse = new WebSSOResource();
+    webSSOResponse.request = request;
+    webSSOResponse.response = responseWrapper;
+    webSSOResponse.context = context;
+    webSSOResponse.init();
+
+    // Issue a token
+    webSSOResponse.doGet();
+
+    // Check the cookie
+    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+    assertNotNull(cookie);
+
+    JWTToken parsedToken = new JWTToken(cookie.getValue());
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+
+    // Verify the audiences
+    List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+    assertEquals(2, audiences.size());
+    assertTrue(audiences.contains("recipient1"));
+    assertTrue(audiences.contains("recipient2"));
+  }
+
+  /**
+   * An HttpServletResponseWrapper that stores the cookies added to the response
+   */
+  private static class CookieResponseWrapper extends HttpServletResponseWrapper {
+
+    private ServletOutputStream outputStream;
+    private Map<String, Cookie> cookies = new HashMap<>();
+
+    public CookieResponseWrapper(HttpServletResponse response) {
+        super(response);
+    }
+
+    public CookieResponseWrapper(HttpServletResponse response, ServletOutputStream outputStream) {
+        super(response);
+        this.outputStream = outputStream;
+    }
+
+    @Override
+    public ServletOutputStream getOutputStream() {
+        return outputStream;
+    }
+
+    @Override
+    public void addCookie(Cookie cookie) {
+        super.addCookie(cookie);
+        cookies.put(cookie.getName(), cookie);
+    }
+
+    public Cookie getCookie(String name) {
+        return cookies.get(name);
+    }
+
+  }
+
+  private static class TestJWTokenAuthority implements JWTokenAuthority {
+
+    private RSAPublicKey publicKey;
+    private RSAPrivateKey privateKey;
+
+    public TestJWTokenAuthority(RSAPublicKey publicKey, RSAPrivateKey privateKey) {
+      this.publicKey = publicKey;
+      this.privateKey = privateKey;
+    }
+
+    @Override
+    public JWTToken issueToken(Subject subject, String algorithm)
+      throws TokenServiceException {
+      Principal p = (Principal) subject.getPrincipals().toArray()[0];
+      return issueToken(p, algorithm);
+    }
+
+    @Override
+    public JWTToken issueToken(Principal p, String algorithm)
+      throws TokenServiceException {
+      return issueToken(p, null, algorithm);
+    }
+
+    @Override
+    public JWTToken issueToken(Principal p, String audience, String algorithm)
+      throws TokenServiceException {
+      return issueToken(p, audience, algorithm, -1);
+    }
+
+    @Override
+    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+      JWSVerifier verifier = new RSASSAVerifier(publicKey);
+      return token.verify(verifier);
+    }
+
+    @Override
+    public JWTToken issueToken(Principal p, String audience, String algorithm,
+                               long expires) throws TokenServiceException {
+      List<String> audiences = null;
+      if (audience != null) {
+        audiences = new ArrayList<String>();
+        audiences.add(audience);
+      }
+      return issueToken(p, audiences, algorithm, expires);
+    }
+
+    @Override
+    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+                               long expires) throws TokenServiceException {
+      String[] claimArray = new String[4];
+      claimArray[0] = "KNOXSSO";
+      claimArray[1] = p.getName();
+      claimArray[2] = null;
+      if (expires == -1) {
+        claimArray[3] = null;
+      } else {
+        claimArray[3] = String.valueOf(expires);
+      }
+
+      JWTToken token = null;
+      if ("RS256".equals(algorithm)) {
+        token = new JWTToken("RS256", claimArray, audiences);
+        JWSSigner signer = new RSASSASigner(privateKey);
+        token.sign(signer);
+      } else {
+        throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
+      }
+
+      return token;
+    }
+
+    @Override
+    public JWT issueToken(Principal p, String algorithm, long expiry)
+        throws TokenServiceException {
+      return issueToken(p, Collections.<String>emptyList(), algorithm, expiry);
+    }
+
+    @Override
+    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+      JWSVerifier verifier = new RSASSAVerifier(publicKey);
+      return token.verify(verifier);
+    }
+
+  }
+
 }


[02/37] knox git commit: KNOX-1054 - Make DefaultURLManager thread-safe (Phil Zampino via Sandeep More)

Posted by lm...@apache.org.
KNOX-1054 - Make DefaultURLManager thread-safe (Phil Zampino via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/d3f507f9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/d3f507f9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/d3f507f9

Branch: refs/heads/KNOX-1049
Commit: d3f507f9424b7deb355696c1c6d5f5aad142157d
Parents: 2666894
Author: Sandeep More <mo...@apache.org>
Authored: Wed Sep 20 21:47:35 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Sep 20 21:47:35 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/ha/provider/impl/DefaultURLManager.java  | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/d3f507f9/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
----------------------------------------------------------------------
diff --git a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
index 484a580..e76be7d 100644
--- a/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
+++ b/gateway-provider-ha/src/main/java/org/apache/hadoop/gateway/ha/provider/impl/DefaultURLManager.java
@@ -24,7 +24,6 @@ import org.apache.hadoop.gateway.ha.provider.impl.i18n.HaMessages;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 
 import java.net.URI;
-import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.ConcurrentLinkedQueue;
 
@@ -34,8 +33,6 @@ public class DefaultURLManager implements URLManager {
 
   private ConcurrentLinkedQueue<String> urls = new ConcurrentLinkedQueue<>();
 
-  public DefaultURLManager() {
-  }
 
   @Override
   public boolean supportsConfig(HaServiceConfig config) {
@@ -48,7 +45,7 @@ public class DefaultURLManager implements URLManager {
   }
 
   @Override
-  public String getActiveURL() {
+  public synchronized String getActiveURL() {
     return urls.peek();
   }
 
@@ -68,13 +65,13 @@ public class DefaultURLManager implements URLManager {
   }
 
   @Override
-  public List<String> getURLs() {
+  public synchronized List<String> getURLs() {
     return Lists.newArrayList(urls.iterator());
   }
 
   @Override
   public synchronized void setURLs(List<String> urls) {
-    if ( urls != null && !urls.isEmpty()) {
+    if (urls != null && !urls.isEmpty()) {
       this.urls.clear();
       this.urls.addAll(urls);
     }
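
For context on why these reads were synchronized: setURLs() clears the backing
ConcurrentLinkedQueue and then refills it, so an unsynchronized getActiveURL()
could peek at the queue between clear() and addAll() and observe no URLs at all.
A minimal sketch of the pattern, using invented host URLs:

    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Sketch of the race the synchronization closes (illustrative, not Knox code).
    public class UrlManagerSketch {
        private final ConcurrentLinkedQueue<String> urls =
            new ConcurrentLinkedQueue<>(Arrays.asList("http://host1:8080", "http://host2:8080"));

        public synchronized String getActiveURL() {
            return urls.peek();
        }

        public synchronized void setURLs(List<String> newUrls) {
            urls.clear();          // without synchronization, a concurrent getActiveURL()
            urls.addAll(newUrls);  // could run right here and return null
        }

        public static void main(String[] args) throws InterruptedException {
            UrlManagerSketch m = new UrlManagerSketch();
            Thread writer = new Thread(() -> m.setURLs(Arrays.asList("http://host3:8080")));
            writer.start();
            System.out.println(m.getActiveURL()); // never null once both methods synchronize
            writer.join();
        }
    }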


[08/37] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
new file mode 100644
index 0000000..fb563fa
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -0,0 +1,186 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.Service;
+import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
+
+import java.io.*;
+import java.util.*;
+
+
+/**
+ * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
+ * gateway.
+ */
+public class SimpleDescriptorHandler {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+    private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
+
+    public static Map<String, File> handle(File desc) throws IOException {
+        return handle(desc, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
+        return handle(desc, desc.getParentFile(), gatewayServices);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
+        return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
+        return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
+        return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
+    }
+
+    public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
+        Map<String, File> result = new HashMap<>();
+
+        File topologyDescriptor;
+
+        DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
+        sdc.setUser(desc.getDiscoveryUser());
+        sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
+
+        Map<String, List<String>> serviceURLs = new HashMap<>();
+
+        if (cluster != null) {
+            for (SimpleDescriptor.Service descService : desc.getServices()) {
+                String serviceName = descService.getName();
+
+                List<String> descServiceURLs = descService.getURLs();
+                if (descServiceURLs == null || descServiceURLs.isEmpty()) {
+                    descServiceURLs = cluster.getServiceURLs(serviceName);
+                }
+
+                // If there is at least one URL associated with the service, then add it to the map
+                if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, descServiceURLs);
+                } else {
+                    log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
+                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
+                                                    ". Topology update aborted!");
+                }
+            }
+        } else {
+            log.failedToDiscoverClusterServices(desc.getClusterName());
+        }
+
+        topologyDescriptor = null;
+        File providerConfig = null;
+        try {
+            // Verify that the referenced provider configuration exists before attempting to read it
+            providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
+            if (providerConfig == null) {
+                log.failedToResolveProviderConfigRef(desc.getProviderConfig());
+                throw new IllegalArgumentException("Unresolved provider configuration reference: " +
+                                                   desc.getProviderConfig() + " ; Topology update aborted!");
+            }
+            result.put("reference", providerConfig);
+
+            // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
+
+            String topologyFilename = desc.getName();
+            if (topologyFilename == null) {
+                topologyFilename = desc.getClusterName();
+            }
+            topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
+            FileWriter fw = new FileWriter(topologyDescriptor);
+
+            fw.write("<topology>\n");
+
+            // Copy the externalized provider configuration content into the topology descriptor in-line
+            InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
+            char[] buffer = new char[1024];
+            int count;
+            while ((count = policyReader.read(buffer)) > 0) {
+                fw.write(buffer, 0, count);
+            }
+            policyReader.close();
+
+            // Write the service declarations
+            for (String serviceName : serviceURLs.keySet()) {
+                fw.write("    <service>\n");
+                fw.write("        <role>" + serviceName + "</role>\n");
+                for (String url : serviceURLs.get(serviceName)) {
+                    fw.write("        <url>" + url + "</url>\n");
+                }
+                fw.write("    </service>\n");
+            }
+
+            fw.write("</topology>\n");
+
+            fw.flush();
+            fw.close();
+        } catch (IOException e) {
+            log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
+            topologyDescriptor.delete();
+        }
+
+        result.put("topology", topologyDescriptor);
+        return result;
+    }
+
+
+    private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
+        File providerConfig;
+
+        // If the reference includes a path
+        if (reference.contains(File.separator)) {
+            // Check if it's an absolute path
+            providerConfig = new File(reference);
+            if (!providerConfig.exists()) {
+                // If it's not an absolute path, try treating it as a relative path
+                providerConfig = new File(srcDirectory, reference);
+                if (!providerConfig.exists()) {
+                    providerConfig = null;
+                }
+            }
+        } else { // No file path, just a name
+            // Check if it's co-located with the referencing descriptor
+            providerConfig = new File(srcDirectory, reference);
+            if (!providerConfig.exists()) {
+                // Check the shared-providers config location
+                File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
+                if (sharedProvidersDir.exists()) {
+                    providerConfig = new File(sharedProvidersDir, reference);
+                    if (!providerConfig.exists()) {
+                        // Check if it's a valid name without the extension
+                        providerConfig = new File(sharedProvidersDir, reference + ".xml");
+                        if (!providerConfig.exists()) {
+                            providerConfig = null;
+                        }
+                    }
+                }
+            }
+        }
+
+        return providerConfig;
+    }
+
+}
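
A hedged usage sketch for the handler above: the caller hands handle() a simple
descriptor file and receives the resolved provider configuration under the
"reference" key and the generated topology file under the "topology" key. The
descriptor path below is invented for illustration:

    import java.io.File;
    import java.io.IOException;
    import java.util.Map;

    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;

    public class HandlerUsageSketch {
        public static void main(String[] args) throws IOException {
            File descriptor = new File("conf/descriptors/mycluster.json"); // hypothetical path
            Map<String, File> result = SimpleDescriptorHandler.handle(descriptor);
            System.out.println("provider config: " + result.get("reference"));
            System.out.println("generated topology: " + result.get("topology"));
        }
    }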

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
new file mode 100644
index 0000000..32ceba9
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import java.util.ArrayList;
+import java.util.List;
+
+class SimpleDescriptorImpl implements SimpleDescriptor {
+
+    @JsonProperty("discovery-type")
+    private String discoveryType;
+
+    @JsonProperty("discovery-address")
+    private String discoveryAddress;
+
+    @JsonProperty("discovery-user")
+    private String discoveryUser;
+
+    @JsonProperty("discovery-pwd-alias")
+    private String discoveryPasswordAlias;
+
+    @JsonProperty("provider-config-ref")
+    private String providerConfig;
+
+    @JsonProperty("cluster")
+    private String cluster;
+
+    @JsonProperty("services")
+    private List<ServiceImpl> services;
+
+    private String name = null;
+
+    void setName(String name) {
+        this.name = name;
+    }
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public String getDiscoveryType() {
+        return discoveryType;
+    }
+
+    @Override
+    public String getDiscoveryAddress() {
+        return discoveryAddress;
+    }
+
+    @Override
+    public String getDiscoveryUser() {
+        return discoveryUser;
+    }
+
+    @Override
+    public String getDiscoveryPasswordAlias() {
+        return discoveryPasswordAlias;
+    }
+
+    @Override
+    public String getClusterName() {
+        return cluster;
+    }
+
+    @Override
+    public String getProviderConfig() {
+        return providerConfig;
+    }
+
+    @Override
+    public List<Service> getServices() {
+        List<Service> result = new ArrayList<>();
+        result.addAll(services);
+        return result;
+    }
+
+    public static class ServiceImpl implements Service {
+        private String name;
+        private List<String> urls;
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getURLs() {
+            return urls;
+        }
+    }
+
+}
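
Given the @JsonProperty annotations above, a simple descriptor that maps onto
this class would look roughly like the following. All values are invented for
illustration, and the services entries assume ServiceImpl's name/urls fields:

    {
      "discovery-type": "AMBARI",
      "discovery-address": "http://ambari.example.com:8080",
      "discovery-user": "discovery-user",
      "discovery-pwd-alias": "discovery-password-alias",
      "provider-config-ref": "ambari-cluster-policy.xml",
      "cluster": "mycluster",
      "services": [
        { "name": "NAMENODE" },
        { "name": "WEBHDFS", "urls": [ "http://host.example.com:50070/webhdfs" ] }
      ]
    }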

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
new file mode 100644
index 0000000..cf9aa28
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.simple")
+public interface SimpleDescriptorMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Service discovery for cluster {0} failed.")
+    void failedToDiscoverClusterServices(final String cluster);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No URLs were discovered for {0} in the {1} cluster.")
+    void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Failed to resolve the referenced provider configuration {0}.")
+    void failedToResolveProviderConfigRef(final String providerConfigRef);
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Error generating topology {0} from simple descriptor: {1}")
+    void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
+                                                      @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
index 55cd5cc..498d750 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -22,8 +22,12 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.monitor.FileAlterationMonitor;
 import org.apache.commons.io.monitor.FileAlterationObserver;
 import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
 import org.apache.hadoop.gateway.topology.*;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
 import org.apache.hadoop.test.TestUtils;
 import org.easymock.EasyMock;
 import org.junit.After;
@@ -36,6 +40,8 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.*;
 
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.isA;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.Matchers.hasItem;
 import static org.hamcrest.core.IsNull.notNullValue;
@@ -78,9 +84,17 @@ public class DefaultTopologyServiceTest {
   public void testGetTopologies() throws Exception {
 
     File dir = createDir();
-    long time = dir.lastModified();
+    File topologyDir = new File(dir, "topologies");
+
+    File descriptorsDir = new File(dir, "descriptors");
+    descriptorsDir.mkdirs();
+
+    File sharedProvidersDir = new File(dir, "shared-providers");
+    sharedProvidersDir.mkdirs();
+
+    long time = topologyDir.lastModified();
     try {
-      createFile(dir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
+      createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
 
       TestTopologyListener topoListener = new TestTopologyListener();
       FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
@@ -89,17 +103,16 @@ public class DefaultTopologyServiceTest {
       Map<String, String> c = new HashMap<>();
 
       GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
-      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(dir.toString()).anyTimes();
+      EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+      EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
       EasyMock.replay(config);
 
       provider.init(config, c);
 
-
       provider.addTopologyChangeListener(topoListener);
 
       provider.reloadTopologies();
 
-
       Collection<Topology> topologies = provider.getTopologies();
       assertThat(topologies, notNullValue());
       assertThat(topologies.size(), is(1));
@@ -110,7 +123,7 @@ public class DefaultTopologyServiceTest {
       topoListener.events.clear();
 
       // Add a file to the directory.
-      File two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
+      File two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-two.xml", 1L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -131,7 +144,7 @@ public class DefaultTopologyServiceTest {
       assertThat(event.getTopology(), notNullValue());
 
       // Update a file in the directory.
-      two = createFile(dir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
+      two = createFile(topologyDir, "two.xml", "org/apache/hadoop/gateway/topology/file/topology-three.xml", 2L);
       provider.reloadTopologies();
       topologies = provider.getTopologies();
       assertThat(topologies.size(), is(2));
@@ -153,6 +166,49 @@ public class DefaultTopologyServiceTest {
       topology = topologies.iterator().next();
       assertThat(topology.getName(), is("one"));
       assertThat(topology.getTimestamp(), is(time));
+
+      // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
+      // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
+      //         org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+      AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+      EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
+      EasyMock.replay(aliasService);
+      DefaultTopologyService.DescriptorsMonitor dm =
+                                          new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
+
+      // Write out the referenced provider config first
+      File provCfgFile = createFile(sharedProvidersDir,
+                                    "ambari-cluster-policy.xml",
+                                    "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
+                                    1L);
+      try {
+        // Create the simple descriptor in the descriptors dir
+        File simpleDesc =
+                createFile(descriptorsDir,
+                           "four.json",
+                           "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
+                           1L);
+
+        // Trigger the topology generation by noticing the simple descriptor
+        dm.onFileChange(simpleDesc);
+
+        // Load the generated topology
+        provider.reloadTopologies();
+        topologies = provider.getTopologies();
+        assertThat(topologies.size(), is(2));
+        names = new HashSet<>(Arrays.asList("one", "four"));
+        iterator = topologies.iterator();
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        topology = iterator.next();
+        assertThat(names, hasItem(topology.getName()));
+        names.remove(topology.getName());
+        assertThat(names.size(), is(0));
+      } finally {
+        provCfgFile.delete();
+
+      }
     } finally {
       FileUtils.deleteQuietly(dir);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
new file mode 100644
index 0000000..269bed2
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/PropertiesFileServiceDiscoveryTest.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
+import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.junit.Assert.*;
+
+
+public class PropertiesFileServiceDiscoveryTest {
+
+    private static final Map<String, String> clusterProperties = new HashMap<>();
+    static {
+        clusterProperties.put("mycluster.name", "mycluster");
+        clusterProperties.put("mycluster.NAMENODE", "hdfs://namenodehost:8020");
+        clusterProperties.put("mycluster.JOBTRACKER", "rpc://jobtrackerhostname:8050");
+        clusterProperties.put("mycluster.WEBHCAT", "http://webhcathost:50111/templeton");
+        clusterProperties.put("mycluster.OOZIE", "http://ooziehost:11000/oozie");
+        clusterProperties.put("mycluster.HIVE", "http://hivehostname:10001/clipath");
+        clusterProperties.put("mycluster.RESOURCEMANAGER", "http://remanhost:8088/ws");
+    }
+
+    private static final Properties config = new Properties();
+    static {
+        for (String name : clusterProperties.keySet()) {
+            config.setProperty(name, clusterProperties.get(name));
+        }
+    }
+
+
+    @Test
+    public void testPropertiesFileServiceDiscovery() throws Exception {
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get("PROPERTIES_FILE");
+        assertNotNull(sd);
+
+        String discoveryAddress = this.getClass().getName() + "__test-discovery-source.properties";
+        File discoverySource = new File(discoveryAddress);
+        try {
+            config.store(new FileOutputStream(discoverySource), "Test discovery source for PropertiesFileServiceDiscovery");
+
+            ServiceDiscovery.Cluster c =
+                        sd.discover(new DefaultServiceDiscoveryConfig(discoverySource.getAbsolutePath()), "mycluster");
+            assertNotNull(c);
+            for (String name : clusterProperties.keySet()) {
+                assertEquals(clusterProperties.get(name), c.getServiceURLs(name.split("\\.")[1]).get(0));
+            }
+        } finally {
+            discoverySource.delete();
+        }
+    }
+
+
+    private void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            String value = "";
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    value += url + " ";
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, value));
+        }
+    }
+
+
+}
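
For reference, the discovery source this test writes out is a plain
java.util.Properties file keyed by <cluster>.<SERVICE>; given the map above it
contains lines such as:

    mycluster.name=mycluster
    mycluster.NAMENODE=hdfs://namenodehost:8020
    mycluster.WEBHCAT=http://webhcathost:50111/templeton
    mycluster.RESOURCEMANAGER=http://remanhost:8088/ws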

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
new file mode 100644
index 0000000..d592ede
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactoryTest.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.impl.DefaultAliasService;
+import org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType;
+import org.junit.Test;
+
+import java.lang.reflect.Field;
+
+import static org.junit.Assert.*;
+
+
+public class ServiceDiscoveryFactoryTest {
+
+    @Test
+    public void testGetDummyImpl() throws Exception {
+        String TYPE = "DUMMY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+    }
+
+
+    @Test
+    public void testGetDummyImplWithMismatchedCase() throws Exception {
+        String TYPE = "dUmmY";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE.toUpperCase(), sd.getType());
+    }
+
+
+    @Test
+    public void testGetInvalidImpl() throws Exception {
+        String TYPE = "InValID";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetImplWithMismatchedType() throws Exception {
+        String TYPE = "DeclaredType";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE);
+        assertNull("Unexpected ServiceDiscovery object.", sd);
+    }
+
+
+    @Test
+    public void testGetPropertiesFileImplWithAliasServiceInjection() throws Exception {
+        String TYPE = "PROPERTIES_FILE";
+        ServiceDiscovery sd = ServiceDiscoveryFactory.get(TYPE, new DefaultAliasService());
+        assertNotNull("Expected to get a ServiceDiscovery object.", sd);
+        assertEquals("Unexpected ServiceDiscovery type.", TYPE, sd.getType());
+
+        // Verify that the AliasService was injected as expected
+        Field aliasServiceField = sd.getClass().getDeclaredField("aliasService");
+        aliasServiceField.setAccessible(true);
+        Object fieldValue = aliasServiceField.get(sd);
+        assertNotNull(fieldValue);
+        assertTrue(AliasService.class.isAssignableFrom(fieldValue.getClass()));
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
new file mode 100644
index 0000000..4a5323e
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscovery.java
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This implementation is intended to be used by tests for which the actual service URLs are of no importance, such that
+ * tests can be written without having a valid service registry (e.g., Ambari) available.
+ */
+public class DummyServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "DUMMY";
+
+    private static final Cluster DUMMY = new Cluster() {
+        @Override
+        public String getName() {
+            return "dummy";
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return Collections.singletonList("http://servicehost:9999/dummy");
+        }
+    };
+
+    private static final Map<String, Cluster> CLUSTERS = new HashMap<>();
+    static {
+        CLUSTERS.put(DUMMY.getName(), DUMMY);
+    }
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return CLUSTERS;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return DUMMY;
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
new file mode 100644
index 0000000..d47c38d
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/DummyServiceDiscoveryType.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class DummyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return DummyServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new DummyServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
new file mode 100644
index 0000000..a7fc34a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscovery.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.*;
+
+class PropertiesFileServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "PROPERTIES_FILE";
+
+    @GatewayService
+    AliasService aliasService;
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+    @Override
+    public Map<String, ServiceDiscovery.Cluster> discover(ServiceDiscoveryConfig config) {
+
+        Map<String, ServiceDiscovery.Cluster> result = new HashMap<>();
+
+        Properties p = new Properties();
+        // Use try-with-resources so the stream is closed even if loading fails
+        try (FileInputStream fis = new FileInputStream(config.getAddress())) {
+            p.load(fis);
+
+            Map<String, Map<String, List<String>>> clusters = new HashMap<>();
+            for (Object key : p.keySet()) {
+                String propertyKey = (String)key;
+                String[] parts = propertyKey.split("\\.");
+                if (parts.length == 2) {
+                    String clusterName = parts[0];
+                    String serviceName = parts[1];
+                    String serviceURL  = p.getProperty(propertyKey);
+                    if (!clusters.containsKey(clusterName)) {
+                        clusters.put(clusterName, new HashMap<String, List<String>>());
+                    }
+                    Map<String, List<String>> serviceURLs = clusters.get(clusterName);
+                    if (!serviceURLs.containsKey(serviceName)) {
+                        serviceURLs.put(serviceName, new ArrayList<String>());
+                    }
+                    serviceURLs.get(serviceName).add(serviceURL);
+                }
+            }
+
+            for (String clusterName : clusters.keySet()) {
+                result.put(clusterName,
+                        new PropertiesFileServiceDiscovery.Cluster(clusterName, clusters.get(clusterName)));
+            }
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+
+        return result;
+    }
+
+
+    @Override
+    public ServiceDiscovery.Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        Map<String, ServiceDiscovery.Cluster> clusters = discover(config);
+        return clusters.get(clusterName);
+    }
+
+
+    static class Cluster implements ServiceDiscovery.Cluster {
+        private String name;
+        private Map<String, List<String>> serviceURLS = new HashMap<>();
+
+        Cluster(String name, Map<String, List<String>> serviceURLs) {
+            this.name = name;
+            this.serviceURLS.putAll(serviceURLs);
+        }
+
+        @Override
+        public String getName() {
+            return name;
+        }
+
+        @Override
+        public List<String> getServiceURLs(String serviceName) {
+            return serviceURLS.get(serviceName);
+        }
+    }
+
+}
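
The parser above recognizes only property keys of the form clusterName.serviceName, mapping each to a single service URL; keys with any other number of dot-separated parts are skipped. A hypothetical input file (the names and URLs here are illustrative) would look like:

    myCluster.NAMENODE=hdfs://c6401.ambari.apache.org:8020
    myCluster.WEBHDFS=http://c6401.ambari.apache.org:50070/webhdfs
    otherCluster.RESOURCEMANAGER=http://c6402.ambari.apache.org:8088/ws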

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
new file mode 100644
index 0000000..2cfd998
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/PropertiesFileServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class PropertiesFileServiceDiscoveryType implements ServiceDiscoveryType {
+
+    @Override
+    public String getType() {
+        return PropertiesFileServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new PropertiesFileServiceDiscovery();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
new file mode 100644
index 0000000..8f7b71a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryImpl.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+
+import java.util.Map;
+
+public class SneakyServiceDiscoveryImpl implements ServiceDiscovery {
+    @Override
+    public String getType() {
+        return "ActualType";
+    }
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        return null;
+    }
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        return null;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
new file mode 100644
index 0000000..97665dc
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/discovery/test/extension/SneakyServiceDiscoveryType.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.test.extension;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
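+/**
+ * Note: the type this factory declares ("DeclaredType") intentionally differs from the type
+ * reported by the ServiceDiscovery instance it creates ("ActualType"), presumably so tests can
+ * verify how a mismatch between declared and actual discovery types is handled.
+ */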
+public class SneakyServiceDiscoveryType implements ServiceDiscoveryType {
+    @Override
+    public String getType() {
+        return "DeclaredType";
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new SneakyServiceDiscoveryImpl();
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
new file mode 100644
index 0000000..3dac66a
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -0,0 +1,218 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
+import java.util.*;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorFactoryTest {
+
+
+    @Test
+    public void testParseJSONSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "admin";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.json";
+        File testJSON = null;
+        try {
+            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (testJSON != null) {
+                try {
+                    testJSON.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+    @Test
+    public void testParseYAMLSimpleDescriptor() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "joeblow";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        String fileName = "test-topology.yml";
+        File testYAML = null;
+        try {
+            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            if (testYAML != null) {
+                try {
+                    testYAML.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor    sd,
+                                          String              discoveryType,
+                                          String              discoveryAddress,
+                                          String              providerConfig,
+                                          String              clusterName,
+                                          Map<String, List<String>> expectedServices) {
+        assertNotNull(sd);
+        assertEquals(discoveryType, sd.getDiscoveryType());
+        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
+        assertEquals(providerConfig, sd.getProviderConfig());
+        assertEquals(clusterName, sd.getClusterName());
+
+        List<SimpleDescriptor.Service> actualServices = sd.getServices();
+
+        assertEquals(expectedServices.size(), actualServices.size());
+
+        for (SimpleDescriptor.Service actualService : actualServices) {
+            assertTrue(expectedServices.containsKey(actualService.getName()));
+            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
+        }
+    }
+
+
+    private File writeJSON(String path, String content) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+    private File writeJSON(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("{" + "\n");
+        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
+        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
+        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
+        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
+        fw.write("\"cluster\":\"" + clusterName + "\",\n");
+        fw.write("\"services\":[\n");
+
+        int i = 0;
+        for (String name : services.keySet()) {
+            fw.write("{\"name\":\"" + name + "\"");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write(", \"urls\":[");
+                Iterator<String> urlIter = urls.iterator();
+                while (urlIter.hasNext()) {
+                    fw.write("\"" + urlIter.next() + "\"");
+                    if (urlIter.hasNext()) {
+                        fw.write(", ");
+                    }
+                }
+                fw.write("]");
+            }
+            fw.write("}");
+            if (i++ < services.size() - 1) {
+                fw.write(",");
+            }
+            fw.write("\n");
+        }
+        fw.write("]\n");
+        fw.write("}\n");
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+    private File writeYAML(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services) throws Exception {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write("---" + "\n");
+        fw.write("discovery-type: " + discoveryType + "\n");
+        fw.write("discovery-address: " + discoveryAddress + "\n");
+        fw.write("discovery-user: " + discoveryUser + "\n");
+        fw.write("provider-config-ref: " + providerConfig + "\n");
+        fw.write("cluster: " + clusterName + "\n");
+        fw.write("services:\n");
+        for (String name : services.keySet()) {
+            fw.write("    - name: " + name + "\n");
+            List<String> urls = services.get(name);
+            if (urls != null) {
+                fw.write("      urls:\n");
+                for (String url : urls) {
+                    fw.write("          - " + url + "\n");
+                }
+            }
+        }
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+
+}
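
For illustration, the writeYAML helper above produces a descriptor of roughly the following shape (abbreviated to two services; the ordering depends on map iteration):

    ---
    discovery-type: AMBARI
    discovery-address: http://c6401.ambari.apache.org:8080
    discovery-user: joeblow
    provider-config-ref: ambari-cluster-policy.xml
    cluster: myCluster
    services:
        - name: HIVE
          urls:
              - http://c6401.ambari.apache.org
        - name: NODEMANAGER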

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
new file mode 100644
index 0000000..90c7146
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -0,0 +1,239 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
+import org.apache.hadoop.gateway.util.XmlUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+import java.io.*;
+import java.util.*;
+
+import static org.junit.Assert.*;
+
+
+public class SimpleDescriptorHandlerTest {
+
+    private static final String TEST_PROVIDER_CONFIG =
+            "    <gateway>\n" +
+                    "        <provider>\n" +
+                    "            <role>authentication</role>\n" +
+                    "            <name>ShiroProvider</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param>\n" +
+                    "                <!-- \n" +
+                    "                session timeout in minutes; this is really an idle timeout.\n" +
+                    "                It defaults to 30 minutes if the property value is not defined.\n" +
+                    "                The current client authentication will expire if the client idles continuously for longer than this value.\n" +
+                    "                -->\n" +
+                    "                <name>sessionTimeout</name>\n" +
+                    "                <value>30</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapContextFactory</name>\n" +
+                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory</name>\n" +
+                    "                <value>$ldapContextFactory</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+                    "                <value>ldap://localhost:33389</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+                    "                <value>simple</value>\n" +
+                    "            </param>\n" +
+                    "            <param>\n" +
+                    "                <name>urls./**</name>\n" +
+                    "                <value>authcBasic</value>\n" +
+                    "            </param>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <provider>\n" +
+                    "            <role>identity-assertion</role>\n" +
+                    "            <name>Default</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "        </provider>\n" +
+                    "\n" +
+                    "        <!--\n" +
+                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+                    "        For example, a Hadoop service running in AWS may return a response that includes URLs containing\n" +
+                    "        an AWS-internal host name.  If the client needs to make a subsequent request to the host identified\n" +
+                    "        in those URLs, it needs to be mapped to an external host name that the client can use to connect via Knox.\n" +
+                    "\n" +
+                    "        If the external and internal host names are the same, turn off this provider by setting the value of\n" +
+                    "        the enabled parameter to false.\n" +
+                    "\n" +
+                    "        The name parameter specifies the external host names in a comma-separated list.\n" +
+                    "        The value parameter specifies the corresponding internal host names in a comma-separated list.\n" +
+                    "\n" +
+                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the\n" +
+                    "        out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
+                    "        -->\n" +
+                    "        <provider>\n" +
+                    "            <role>hostmap</role>\n" +
+                    "            <name>static</name>\n" +
+                    "            <enabled>true</enabled>\n" +
+                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+                    "        </provider>\n" +
+                    "    </gateway>\n";
+
+
+    /**
+     * KNOX-1006
+     *
+     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+     */
+    @Test
+    public void testSimpleDescriptorHandler() throws Exception {
+
+        final String type = "DUMMY";
+        final String address = "http://c6401.ambari.apache.org:8080";
+        final String clusterName = "dummy";
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                           SimpleDescriptorHandler.handle(testDescriptor,
+                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                          destDir);
+            topologyFile = files.get("topology");
+
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+
+        Writer fw = new FileWriter(f);
+        fw.write(content);
+        fw.flush();
+        fw.close();
+
+        return f;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..82a6f86
--- /dev/null
+++ b/gateway-server/src/test/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,21 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.test.extension.SneakyServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscoveryType
+org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscoveryType
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
new file mode 100644
index 0000000..8223bea
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml
@@ -0,0 +1,74 @@
+<gateway>
+    <provider>
+        <role>authentication</role>
+        <name>ShiroProvider</name>
+        <enabled>true</enabled>
+        <param>
+            <!--
+            session timeout in minutes; this is really an idle timeout.
+            It defaults to 30 minutes if the property value is not defined.
+            The current client authentication will expire if the client idles continuously for longer than this value.
+            -->
+            <name>sessionTimeout</name>
+            <value>30</value>
+        </param>
+        <param>
+            <name>main.ldapRealm</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+        </param>
+        <param>
+            <name>main.ldapContextFactory</name>
+            <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory</name>
+            <value>$ldapContextFactory</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.userDnTemplate</name>
+            <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.url</name>
+            <value>ldap://localhost:33389</value>
+        </param>
+        <param>
+            <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+            <value>simple</value>
+        </param>
+        <param>
+            <name>urls./**</name>
+            <value>authcBasic</value>
+        </param>
+    </provider>
+
+    <provider>
+        <role>identity-assertion</role>
+        <name>Default</name>
+        <enabled>true</enabled>
+    </provider>
+
+    <!--
+    Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+    For example, a Hadoop service running in AWS may return a response that includes URLs containing
+    an AWS-internal host name.  If the client needs to make a subsequent request to the host identified
+    in those URLs, it needs to be mapped to an external host name that the client can use to connect via Knox.
+
+    If the external and internal host names are the same, turn off this provider by setting the value of
+    the enabled parameter to false.
+
+    The name parameter specifies the external host names in a comma-separated list.
+    The value parameter specifies the corresponding internal host names in a comma-separated list.
+
+    Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+    out-of-the-box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the
+    Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.
+    -->
+    <provider>
+        <role>hostmap</role>
+        <name>static</name>
+        <enabled>true</enabled>
+        <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+    </provider>
+
+</gateway>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
new file mode 100644
index 0000000..45407a7
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-topology-four.json
@@ -0,0 +1,18 @@
+{
+  "discovery-type":"DUMMY",
+  "discovery-address":"http://c6401.ambari.apache.org:8080",
+  "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+  "cluster":"dummy",
+  "services":[
+    {"name":"NAMENODE"},
+    {"name":"JOBTRACKER"},
+    {"name":"WEBHDFS"},
+    {"name":"WEBHCAT"},
+    {"name":"OOZIE"},
+    {"name":"WEBHBASE"},
+    {"name":"HIVE"},
+    {"name":"RESOURCEMANAGER"},
+    {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+    {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+  ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
new file mode 100644
index 0000000..554ddbe
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/GatewayService.java
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.lang.annotation.*;
+
+/**
+ * This annotation can be used to inject gateway services into a ServiceDiscovery implementation.
+ */
+@Documented
+@Target(ElementType.FIELD)
+@Retention(RetentionPolicy.RUNTIME)
+public @interface GatewayService {
+
+}
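
Usage amounts to annotating a field in a ServiceDiscovery implementation, as PropertiesFileServiceDiscovery does earlier in this series:

    @GatewayService
    AliasService aliasService;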

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
new file mode 100644
index 0000000..eefa30b
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscovery.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import java.util.List;
+import java.util.Map;
+
+
+/**
+ * Implementations provide the means by which Hadoop service endpoint URLs are discovered from a source with knowledge
+ * about the service topology of one or more clusters.
+ */
+public interface ServiceDiscovery {
+
+    /**
+     * This is the type specified in a simple descriptor to indicate which ServiceDiscovery implementation to employ.
+     *
+     * @return The identifier for the service discovery type.
+     */
+    String getType();
+
+
+    /**
+     * Discover details of all the clusters known to the target registry.
+     *
+     * @param config The configuration for the discovery invocation
+     *
+     * @return A Map of the discovered service data, keyed by the cluster name.
+     */
+    Map<String, Cluster> discover(ServiceDiscoveryConfig config);
+
+
+    /**
+     * Discover details for a single cluster.
+     *
+     * @param config The configuration for the discovery invocation
+     * @param clusterName The name of a particular cluster
+     *
+     * @return The discovered service data for the specified cluster
+     */
+    Cluster discover(ServiceDiscoveryConfig config, String clusterName);
+
+
+    /**
+     * A handle to the service discovery result.
+     */
+    interface Cluster {
+
+        /**
+         * @return The name of the cluster
+         */
+        String getName();
+
+        /**
+         * @param serviceName The name of the service
+         * @return The URLs for the specified service in this cluster.
+         */
+        List<String> getServiceURLs(String serviceName);
+    }
+
+
+}
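
A minimal sketch of a consumer, assuming a ServiceDiscovery instance and a ServiceDiscoveryConfig are already in hand (the variable names are illustrative):

    Map<String, ServiceDiscovery.Cluster> clusters = discovery.discover(config);
    ServiceDiscovery.Cluster cluster = clusters.get("myCluster");
    if (cluster != null) {
        List<String> urls = cluster.getServiceURLs("WEBHDFS");
        // may be null if the discovery source knows nothing about the service
    }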

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6b2e741
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryConfig.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery configuration details.
+ */
+public interface ServiceDiscoveryConfig {
+
+    /**
+     *
+     * @return The address of the discovery source.
+     */
+    String getAddress();
+
+    /**
+     *
+     * @return The username configured for interactions with the discovery source.
+     */
+    String getUser();
+
+    /**
+     *
+     * @return The alias for the password required for interactions with the discovery source.
+     */
+    String getPasswordAlias();
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
new file mode 100644
index 0000000..cddced1
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryType.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+/**
+ * ServiceDiscovery extensions must implement this interface to add support for a new discovery source.
+ *
+ * The ServiceLoader mechanism is used to include ServiceDiscovery extensions, and implementations of this interface
+ * are the providers.
+ */
+public interface ServiceDiscoveryType {
+
+    /**
+     *
+     * @return The identifier for the discovery type.
+     */
+    String getType();
+
+
+    /**
+     *
+     * @return A new instance of the ServiceDiscovery implementation provided by this type.
+     */
+    ServiceDiscovery newInstance();
+
+}
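
Since implementations are registered via the ServiceLoader mechanism (see the META-INF/services file earlier in this series), resolving a type identifier to an instance is a matter of iterating the loaded providers. A sketch, not the ServiceDiscoveryFactory this series actually ships:

    ServiceDiscovery discovery = null;
    for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
        if ("PROPERTIES_FILE".equals(sdt.getType())) {
            discovery = sdt.newInstance();
            break;
        }
    }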

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index d2f4176..2708f6b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -45,6 +45,7 @@
         <module>gateway-i18n-logging-log4j</module>
         <module>gateway-i18n-logging-sl4j</module>
         <module>gateway-spi</module>
+        <module>gateway-discovery-ambari</module>
         <module>gateway-server</module>
         <module>gateway-server-launcher</module>
         <module>gateway-server-xforwarded-filter</module>
@@ -684,6 +685,11 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
+                <artifactId>gateway-discovery-ambari</artifactId>
+                <version>${gateway-version}</version>
+            </dependency>
+            <dependency>
+                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-release</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -702,17 +708,16 @@
                 <artifactId>gateway-shell-samples</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
-
-        <dependency>
-            <groupId>org.picketlink</groupId>
-            <artifactId>picketlink-federation</artifactId>
-            <version>2.7.0.CR3</version>
-        </dependency>
-        <dependency>
-            <groupId>org.jboss.logging</groupId>
-            <artifactId>jboss-logging</artifactId>
-            <version>3.2.0.Final</version>
-        </dependency>
+            <dependency>
+                <groupId>org.picketlink</groupId>
+                <artifactId>picketlink-federation</artifactId>
+                <version>2.7.0.CR3</version>
+            </dependency>
+            <dependency>
+                <groupId>org.jboss.logging</groupId>
+                <artifactId>jboss-logging</artifactId>
+                <version>3.2.0.Final</version>
+            </dependency>
             <dependency>
                 <groupId>org.glassfish.jersey.containers</groupId>
                 <artifactId>jersey-container-servlet</artifactId>


[37/37] knox git commit: Merge branch 'master' into KNOX-1049

Posted by lm...@apache.org.
Merge branch 'master' into KNOX-1049


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/1ee93707
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/1ee93707
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/1ee93707

Branch: refs/heads/KNOX-1049
Commit: 1ee937071b236909a38dc764db451e65a3225ad6
Parents: f549041 986615f
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 26 10:21:54 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 26 10:21:54 2017 -0400

----------------------------------------------------------------------
 gateway-discovery-ambari/pom.xml                |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 115 +++
 .../discovery/ambari/AmbariComponent.java       |  85 ++
 .../ambari/AmbariDynamicServiceURLCreator.java  | 151 ++++
 .../ambari/AmbariServiceDiscovery.java          | 305 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  | 121 +++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/ConditionalValueHandler.java         |  24 +
 .../discovery/ambari/PropertyEqualsHandler.java |  76 ++
 .../ambari/ServiceURLPropertyConfig.java        | 324 +++++++
 .../discovery/ambari/SimpleValueHandler.java    |  32 +
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 ...iscovery-component-config-mapping.properties |  36 +
 .../ambari-service-discovery-url-mappings.xml   | 398 +++++++++
 .../AmbariDynamicServiceURLCreatorTest.java     | 876 +++++++++++++++++++
 .../ambari/AmbariServiceDiscoveryTest.java      | 858 ++++++++++++++++++
 .../ha/provider/impl/DefaultURLManager.java     |   9 +-
 ...entityAsserterHttpServletRequestWrapper.java |  25 +-
 .../provider/federation/jwt/JWTMessages.java    |   3 +
 .../jwt/filter/AbstractJWTFilter.java           |  59 +-
 .../filter/JWTAccessTokenAssertionFilter.java   |  23 +-
 .../jwt/filter/JWTAuthCodeAssertionFilter.java  |  16 +-
 .../jwt/filter/JWTFederationFilter.java         |   5 +-
 .../jwt/filter/SSOCookieFederationFilter.java   |   5 +-
 .../federation/AbstractJWTFilterTest.java       | 287 +++++-
 .../federation/SSOCookieProviderTest.java       |   5 +-
 gateway-provider-security-picketlink/pom.xml    |  76 --
 .../gateway/picketlink/PicketlinkMessages.java  |  40 -
 .../picketlink/deploy/PicketlinkConf.java       | 194 ----
 ...PicketlinkFederationProviderContributor.java | 132 ---
 .../filter/CaptureOriginalURLFilter.java        |  89 --
 .../filter/PicketlinkIdentityAdapter.java       | 102 ---
 ...gateway.deploy.ProviderDeploymentContributor |  19 -
 .../gateway/picketlink/PicketlinkTest.java      |  30 -
 gateway-release/home/conf/descriptors/README    |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-release/pom.xml                         |   8 +-
 gateway-server/pom.xml                          |   5 +
 .../apache/hadoop/gateway/GatewayMessages.java  |   9 +-
 .../gateway/config/impl/GatewayConfigImpl.java  |  10 +
 .../services/DefaultGatewayServices.java        |   3 +-
 .../impl/DefaultServiceRegistryService.java     |  50 +-
 .../security/impl/DefaultAliasService.java      |  12 +-
 .../services/security/impl/JettySSLService.java |  11 +-
 .../impl/DefaultTokenAuthorityService.java      |  43 +-
 .../topology/impl/DefaultTopologyService.java   | 294 ++++++-
 .../builder/BeanPropertyTopologyBuilder.java    |   2 +-
 .../DefaultServiceDiscoveryConfig.java          |  48 +
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  48 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 267 ++++++
 .../topology/simple/SimpleDescriptorImpl.java   | 123 +++
 .../simple/SimpleDescriptorMessages.java        |  50 ++
 .../websockets/GatewayWebsocketHandler.java     |  41 +-
 .../gateway/websockets/ProxyInboundClient.java  | 107 +++
 .../websockets/ProxyWebSocketAdapter.java       |  20 +-
 .../impl/DefaultTokenAuthorityServiceTest.java  | 253 ++++++
 .../topology/DefaultTopologyServiceTest.java    |  70 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  90 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  35 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 422 +++++++++
 .../simple/SimpleDescriptorHandlerTest.java     | 447 ++++++++++
 .../websockets/ProxyInboundClientTest.java      | 374 ++++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../resources/keystores/server-keystore.jks     | Bin 0 -> 1387 bytes
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../services/ambariui/2.2.0/service.xml         |   5 +
 .../resources/services/atlas/0.8.0/rewrite.xml  |   6 +-
 gateway-service-knoxsso/pom.xml                 |  11 +-
 .../gateway/service/knoxsso/WebSSOResource.java |  29 +-
 .../service/knoxsso/WebSSOResourceTest.java     | 411 ++++++++-
 .../service/knoxtoken/TokenResource.java        |  50 +-
 .../knoxtoken/TokenServiceResourceTest.java     | 288 +++++-
 gateway-shell-release/pom.xml                   |   4 +
 .../apache/hadoop/gateway/shell/job/Sqoop.java  |   2 +-
 .../hadoop/gateway/config/GatewayConfig.java    |   2 +
 .../dispatch/AbstractGatewayDispatch.java       |   8 +
 .../gateway/dispatch/DefaultDispatch.java       |   9 +
 .../hadoop/gateway/dispatch/Dispatch.java       |   6 +
 .../gateway/dispatch/GatewayDispatchFilter.java |   8 +
 .../security/token/JWTokenAuthority.java        |  19 +-
 .../services/security/token/impl/JWT.java       |  42 +-
 .../services/security/token/impl/JWTToken.java  |  59 +-
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 .../security/token/impl/JWTTokenTest.java       |  67 +-
 .../hadoop/gateway/GatewayTestConfig.java       |   5 +
 .../apache/hadoop/gateway/util/HttpUtils.java   |   7 +-
 .../hadoop/gateway/util/urltemplate/Parser.java |  10 +-
 .../gateway/util/urltemplate/ParserTest.java    |  17 +
 pom.xml                                         |  51 +-
 101 files changed, 8396 insertions(+), 1016 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/1ee93707/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b33e52c,a30cf13..65278a1
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@@ -86,9 -76,8 +86,9 @@@ public class BeanPropertyTopologyBuilde
      public Topology build() {
          Topology topology = new Topology();
          topology.setName(name);
 +        topology.setDefaultServicePath(defaultService);
  
-           for (Provider provider : providers) {
+         for (Provider provider : providers) {
              topology.addProvider(provider);
          }
  

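A minimal sketch (not from the commit) of what build() now assembles. It assumes Topology is the plain bean used above, located in org.apache.hadoop.gateway.topology; the topology name and default service path values are hypothetical.

    import org.apache.hadoop.gateway.topology.Topology;

    public class TopologySketch {
        public static void main(String[] args) {
            Topology topology = new Topology();
            topology.setName("sandbox");               // hypothetical topology name
            topology.setDefaultServicePath("webhdfs"); // the new default-service-path field from this merge
        }
    }
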

[23/37] knox git commit: KNOX-1070 - Drop support for Java 7 (Rick Kellogg via Sandeep More)

Posted by lm...@apache.org.
KNOX-1070 - Drop support for Java 7 (Rick Kellogg via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/d762ed3f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/d762ed3f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/d762ed3f

Branch: refs/heads/KNOX-1049
Commit: d762ed3fd9193ad6e837fec1747d18f334872951
Parents: 485769b
Author: Sandeep More <mo...@apache.org>
Authored: Thu Oct 5 09:33:33 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Oct 5 09:33:33 2017 -0400

----------------------------------------------------------------------
 pom.xml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/d762ed3f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index fb00d71..e314415 100644
--- a/pom.xml
+++ b/pom.xml
@@ -291,8 +291,8 @@
                 <version>3.3</version>
                 <configuration>
                     <compilerId>javac</compilerId>
-                    <source>1.7</source>
-                    <target>1.7</target>
+                    <source>1.8</source>
+                    <target>1.8</target>
                     <debug>true</debug>
                 </configuration>
                 <dependencies>
@@ -351,9 +351,9 @@
                         <requireMavenVersion>
                           <version>[3.0.2,)</version>
                         </requireMavenVersion>
-                        <!--<requireJavaVersion>
+                        <requireJavaVersion>
                           <version>[1.8,)</version>
-                        </requireJavaVersion> -->
+                        </requireJavaVersion>
                       </rules>
                     </configuration>
                   </execution>

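A quick way to confirm a build environment meets the new Java 8 floor is to compile a Java 8-only construct; the sketch below (not from the commit) fails under a 1.7 javac because it uses lambdas and java.util.stream.

    import java.util.Arrays;
    import java.util.List;

    public class Java8Check {
        public static void main(String[] args) {
            List<String> modules = Arrays.asList("gateway-server", "gateway-release");
            modules.stream()
                   .filter(m -> m.startsWith("gateway")) // lambdas require -source 1.8
                   .forEach(System.out::println);        // as do method references
        }
    }
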

[33/37] knox git commit: KNOX-1077 - Knox should notice simple descriptors on startup (Phil Zampino via lmccay)

Posted by lm...@apache.org.
KNOX-1077 - Knox should notice simple descriptors on startup (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8ecac921
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8ecac921
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8ecac921

Branch: refs/heads/KNOX-1049
Commit: 8ecac9216e6512afe84e0325a6e6b27d87689fde
Parents: 2288231
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 19 15:11:45 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 19 15:12:04 2017 -0400

----------------------------------------------------------------------
 .../topology/impl/DefaultTopologyService.java        | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/8ecac921/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index a493bc4..a8d6de8 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -496,6 +496,17 @@ public class DefaultTopologyService
       SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
       initListener(sharedProvidersDirectory, spm, spm);
 
+      // For all the descriptors currently in the descriptors dir at start-up time, trigger topology generation.
+      // This happens prior to the start-up loading of the topologies.
+      String[] descriptorFilenames =  descriptorsDirectory.list();
+      if (descriptorFilenames != null) {
+          for (String descriptorFilename : descriptorFilenames) {
+              if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
+                  dm.onFileChange(new File(descriptorsDirectory, descriptorFilename));
+              }
+          }
+      }
+
     } catch (IOException | SAXException io) {
       throw new ServiceLifecycleException(io.getMessage());
     }
@@ -521,6 +532,10 @@ public class DefaultTopologyService
     private Map<String, List<String>> providerConfigReferences = new HashMap<>();
 
 
+    static boolean isDescriptorFile(String filename) {
+      return SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(filename));
+    }
+
     public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
       this.topologiesDir  = topologiesDir;
       this.aliasService   = aliasService;

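The same start-up scan pattern, as a standalone sketch (not from the commit): list the descriptors directory and hand every supported file to a handler. The extension set below is an assumption for illustration; the real set comes from DescriptorsMonitor.SUPPORTED_EXTENSIONS.

    import java.io.File;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.io.FilenameUtils;

    public class StartupScanSketch {
        // Assumed extensions, for illustration only
        private static final List<String> SUPPORTED_EXTENSIONS = Arrays.asList("json", "yml");

        static boolean isDescriptorFile(String filename) {
            return SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(filename));
        }

        public static void main(String[] args) {
            File descriptorsDirectory = new File(args[0]);
            String[] descriptorFilenames = descriptorsDirectory.list();
            if (descriptorFilenames != null) { // list() returns null for a non-directory
                for (String descriptorFilename : descriptorFilenames) {
                    if (isDescriptorFile(descriptorFilename)) {
                        System.out.println("Would trigger topology generation for " + descriptorFilename);
                    }
                }
            }
        }
    }
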

[25/37] knox git commit: KNOX-1064 - Externalize Hadoop Service Configuration Details and Service URL Creation (Phil Zampino via Sandeep More)

Posted by lm...@apache.org.
KNOX-1064 - Externalize Hadoop Service Configuration Details and Service URL Creation (Phil Zampino via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7b401def
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7b401def
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7b401def

Branch: refs/heads/KNOX-1049
Commit: 7b401def625630cbf1f9ee5f8993bbcb3269c222
Parents: d762ed3
Author: Sandeep More <mo...@apache.org>
Authored: Thu Oct 5 10:19:07 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Oct 5 10:19:07 2017 -0400

----------------------------------------------------------------------
 .../discovery/ambari/AmbariCluster.java         |   7 +-
 .../discovery/ambari/AmbariComponent.java       |  27 +-
 .../ambari/AmbariDynamicServiceURLCreator.java  | 151 ++++
 .../ambari/AmbariServiceDiscovery.java          |  58 +-
 .../ambari/AmbariServiceDiscoveryMessages.java  |  64 +-
 .../ambari/AmbariServiceURLCreator.java         | 184 ----
 .../ambari/ConditionalValueHandler.java         |  24 +
 .../discovery/ambari/PropertyEqualsHandler.java |  76 ++
 .../ambari/ServiceURLPropertyConfig.java        | 324 +++++++
 .../discovery/ambari/SimpleValueHandler.java    |  32 +
 ...iscovery-component-config-mapping.properties |  36 +
 .../ambari-service-discovery-url-mappings.xml   | 398 +++++++++
 .../AmbariDynamicServiceURLCreatorTest.java     | 876 +++++++++++++++++++
 .../ambari/AmbariServiceDiscoveryTest.java      |   4 +-
 .../simple/SimpleDescriptorHandler.java         |  68 +-
 .../simple/SimpleDescriptorMessages.java        |   8 +-
 .../simple/SimpleDescriptorHandlerTest.java     | 327 +++++--
 17 files changed, 2335 insertions(+), 329 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
index 6eaabd3..eb84433 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -27,7 +27,7 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
 
     private String name = null;
 
-    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+    private AmbariDynamicServiceURLCreator urlCreator;
 
     private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
 
@@ -36,7 +36,8 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
 
     AmbariCluster(String name) {
         this.name = name;
-        components = new HashMap<String, AmbariComponent>();
+        components = new HashMap<>();
+        urlCreator = new AmbariDynamicServiceURLCreator(this);
     }
 
     void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
@@ -81,7 +82,7 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
     @Override
     public List<String> getServiceURLs(String serviceName) {
         List<String> urls = new ArrayList<>();
-        urls.addAll(urlCreator.create(this, serviceName));
+        urls.addAll(urlCreator.create(serviceName));
         return urls;
     }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
index 55257fb..d9d5b03 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.gateway.topology.discovery.ambari;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -27,7 +28,7 @@ class AmbariComponent {
     private String name        = null;
     private String version     = null;
 
-    private List<String> hostNames = null;
+    private List<String> hostNames = new ArrayList<>();
 
     private Map<String, String> properties = null;
 
@@ -41,35 +42,43 @@ class AmbariComponent {
         this.serviceName = service;
         this.clusterName = cluster;
         this.version = version;
-        this.hostNames = hostNames;
         this.properties = properties;
+
+        if (hostNames != null) {
+            // Add the hostnames individually to prevent adding any null values
+            for (String hostName : hostNames) {
+                if (hostName != null) {
+                    this.hostNames.add(hostName);
+                }
+            }
+        }
     }
 
-    public String getVersion() {
+    String getVersion() {
         return version;
     }
 
-    public String getName() {
+    String getName() {
         return name;
     }
 
-    public String getServiceName() {
+    String getServiceName() {
         return serviceName;
     }
 
-    public String getClusterName() {
+    String getClusterName() {
         return clusterName;
     }
 
-    public List<String> getHostNames() {
+    List<String> getHostNames() {
         return hostNames;
     }
 
-    public Map<String, String> getConfigProperties() {
+    Map<String, String> getConfigProperties() {
         return properties;
     }
 
-    public String getConfigProperty(String propertyName) {
+    String getConfigProperty(String propertyName) {
         return properties.get(propertyName);
     }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
new file mode 100644
index 0000000..ed5d3e7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+class AmbariDynamicServiceURLCreator {
+
+    static final String MAPPING_CONFIG_OVERRIDE_PROPERTY = "org.apache.gateway.topology.discovery.ambari.config";
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private AmbariCluster cluster = null;
+    private ServiceURLPropertyConfig config;
+
+    AmbariDynamicServiceURLCreator(AmbariCluster cluster) {
+        this.cluster = cluster;
+
+        String mappingConfiguration = System.getProperty(MAPPING_CONFIG_OVERRIDE_PROPERTY);
+        if (mappingConfiguration != null) {
+            File mappingConfigFile = new File(mappingConfiguration);
+            if (mappingConfigFile.exists()) {
+                try {
+                    config = new ServiceURLPropertyConfig(mappingConfigFile);
+                    log.loadedComponentConfigMappings(mappingConfigFile.getAbsolutePath());
+                } catch (Exception e) {
+                    log.failedToLoadComponentConfigMappings(mappingConfigFile.getAbsolutePath(), e);
+                }
+            }
+        }
+
+        // If there is no valid override configured, fall-back to the internal mapping configuration
+        if (config == null) {
+            config = new ServiceURLPropertyConfig();
+        }
+    }
+
+    AmbariDynamicServiceURLCreator(AmbariCluster cluster, File mappingConfiguration) throws IOException {
+        this.cluster = cluster;
+        config = new ServiceURLPropertyConfig(new FileInputStream(mappingConfiguration));
+    }
+
+    AmbariDynamicServiceURLCreator(AmbariCluster cluster, String mappings) {
+        this.cluster = cluster;
+        config = new ServiceURLPropertyConfig(new ByteArrayInputStream(mappings.getBytes()));
+    }
+
+    List<String> create(String serviceName) {
+        List<String> urls = new ArrayList<>();
+
+        Map<String, String> placeholderValues = new HashMap<>();
+        List<String> componentHostnames = new ArrayList<>();
+        String hostNamePlaceholder = null;
+
+        ServiceURLPropertyConfig.URLPattern pattern = config.getURLPattern(serviceName);
+        if (pattern != null) {
+            for (String propertyName : pattern.getPlaceholders()) {
+                ServiceURLPropertyConfig.Property configProperty = config.getConfigProperty(serviceName, propertyName);
+
+                String propertyValue = null;
+                String propertyType = configProperty.getType();
+                if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+                    log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
+                    AmbariCluster.ServiceConfiguration svcConfig =
+                        cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
+                    if (svcConfig != null) {
+                        propertyValue = svcConfig.getProperties().get(configProperty.getValue());
+                    }
+                } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+                    String compName = configProperty.getComponent();
+                    if (compName != null) {
+                        AmbariComponent component = cluster.getComponent(compName);
+                        if (component != null) {
+                            if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
+                                log.lookingUpComponentHosts(compName);
+                                componentHostnames.addAll(component.getHostNames());
+                                hostNamePlaceholder = propertyName; // Remember the host name placeholder
+                            } else {
+                                log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
+                                propertyValue = component.getConfigProperty(configProperty.getValue());
+                            }
+                        }
+                    }
+                } else { // Derived property
+                    log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
+                    ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
+                    propertyValue = p.getValue();
+                    if (propertyValue == null) {
+                        if (p.getConditionHandler() != null) {
+                            propertyValue = p.getConditionHandler().evaluate(config, cluster);
+                        }
+                    }
+                }
+
+                log.determinedPropertyValue(configProperty.getName(), propertyValue);
+                placeholderValues.put(configProperty.getName(), propertyValue);
+            }
+
+            // For patterns with a placeholder value for the hostname (e.g., multiple URL scenarios)
+            if (!componentHostnames.isEmpty()) {
+                for (String componentHostname : componentHostnames) {
+                    String url = pattern.get().replace("{" + hostNamePlaceholder + "}", componentHostname);
+                    urls.add(createURL(url, placeholderValues));
+                }
+            } else { // Single URL result case
+                urls.add(createURL(pattern.get(), placeholderValues));
+            }
+        }
+
+        return urls;
+    }
+
+    private String createURL(String pattern, Map<String, String> placeholderValues) {
+        String url = null;
+        if (pattern != null) {
+            url = pattern;
+            for (String placeHolder : placeholderValues.keySet()) {
+                String value = placeholderValues.get(placeHolder);
+                if (value != null) {
+                    url = url.replace("{" + placeHolder + "}", value);
+                }
+            }
+        }
+        return url;
+    }
+
+}
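
A minimal usage sketch (not from the commit). Both classes are package-private, so this would live in the same package; the cluster name is hypothetical, and an unpopulated cluster yields no meaningful URLs, since discovery normally supplies the component and configuration data the placeholders resolve against.

    package org.apache.hadoop.gateway.topology.discovery.ambari;

    public class UrlCreatorSketch {
        public static void main(String[] args) {
            AmbariCluster cluster = new AmbariCluster("Sandbox"); // hypothetical cluster name
            // ... discovery would populate the cluster's components and configs here ...
            AmbariDynamicServiceURLCreator urlCreator = new AmbariDynamicServiceURLCreator(cluster);
            for (String url : urlCreator.create("WEBHDFS")) {
                System.out.println(url);
            }
        }
    }
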

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index 34f20a7..37f68ae 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -16,6 +16,13 @@
  */
 package org.apache.hadoop.gateway.topology.discovery.ambari;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
 import net.minidev.json.JSONArray;
 import net.minidev.json.JSONObject;
 import net.minidev.json.JSONValue;
@@ -34,9 +41,6 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.message.BasicHeader;
 import org.apache.http.util.EntityUtils;
 
-import java.io.IOException;
-import java.util.*;
-
 
 class AmbariServiceDiscovery implements ServiceDiscovery {
 
@@ -50,31 +54,33 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
     static final String AMBARI_SERVICECONFIGS_URI =
             AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
 
+    private static final String COMPONENT_CONFIG_MAPPING_FILE =
+                                                        "ambari-service-discovery-component-config-mapping.properties";
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
     // Map of component names to service configuration types
     private static Map<String, String> componentServiceConfigs = new HashMap<>();
     static {
-        componentServiceConfigs.put("NAMENODE", "hdfs-site");
-        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
-        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
-        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
-        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
-        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
-    } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
+        try {
+            Properties configMapping = new Properties();
+            configMapping.load(AmbariServiceDiscovery.class.getClassLoader().getResourceAsStream(COMPONENT_CONFIG_MAPPING_FILE));
+            for (String componentName : configMapping.stringPropertyNames()) {
+                componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
+            }
+        } catch (Exception e) {
+            log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
+        }
+    }
 
     private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
     private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
 
-    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
     @GatewayService
     private AliasService aliasService;
 
     private CloseableHttpClient httpClient = null;
 
-    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
 
     AmbariServiceDiscovery() {
         httpClient = org.apache.http.impl.client.HttpClients.createDefault();
@@ -141,13 +147,21 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
 
                         serviceComponents.put(componentName, serviceName);
 
-//                    String hostName = (String) hostRoles.get("host_name");
-                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
-                        log.discoveredServiceHost(serviceName, hostName);
-                        if (!componentHostNames.containsKey(componentName)) {
-                            componentHostNames.put(componentName, new ArrayList<String>());
+                        // Assuming public host name is more applicable than host_name
+                        String hostName = (String) hostRoles.get("public_host_name");
+                        if (hostName == null) {
+                            // Some (even slightly) older versions of Ambari/HDP do not return public_host_name,
+                            // so fall back to host_name in those cases.
+                            hostName = (String) hostRoles.get("host_name");
+                        }
+
+                        if (hostName != null) {
+                            log.discoveredServiceHost(serviceName, hostName);
+                            if (!componentHostNames.containsKey(componentName)) {
+                                componentHostNames.put(componentName, new ArrayList<String>());
+                            }
+                            componentHostNames.get(componentName).add(hostName);
                         }
-                        componentHostNames.get(componentName).add(hostName);
                     }
                 }
             }

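The fallback logic above, as a small standalone sketch (not from the commit): prefer public_host_name, falling back to host_name for Ambari versions that do not return the former. The sample host name is hypothetical.

    import java.util.HashMap;
    import java.util.Map;

    public class HostNameFallbackSketch {
        static String resolveHostName(Map<String, Object> hostRoles) {
            String hostName = (String) hostRoles.get("public_host_name");
            if (hostName == null) {
                // Older Ambari/HDP versions omit public_host_name
                hostName = (String) hostRoles.get("host_name");
            }
            return hostName; // may still be null if neither key is present
        }

        public static void main(String[] args) {
            Map<String, Object> hostRoles = new HashMap<>();
            hostRoles.put("host_name", "c6401.ambari.apache.org"); // hypothetical host
            System.out.println(resolveHostName(hostRoles));        // prints c6401.ambari.apache.org
        }
    }
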
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index caa16ed..0661224 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -21,61 +21,101 @@ import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
 import org.apache.hadoop.gateway.i18n.messages.Messages;
 import org.apache.hadoop.gateway.i18n.messages.StackTrace;
 
-@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
 public interface AmbariServiceDiscoveryMessages {
 
     @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error during cluster {0} discovery: {1}")
+            text = "Failed to load service discovery configuration: {1}")
+    void failedToLoadServiceDiscoveryConfiguration(@StackTrace(level = MessageLevel.ERROR) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+             text = "Failed to load service discovery configuration {0}: {1}")
+    void failedToLoadServiceDiscoveryConfiguration(final String configuration,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+    @Message(level = MessageLevel.ERROR,
+             text = "Encountered an error during cluster {0} discovery: {1}")
     void clusterDiscoveryError(final String clusterName,
                                @StackTrace(level = MessageLevel.ERROR) Exception e);
 
 
     @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation {0} failed: {1}")
+             text = "REST invocation {0} failed: {1}")
     void restInvocationError(final String url,
                              @StackTrace(level = MessageLevel.ERROR) Exception e);
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+             text = "Encountered an error attempting to determine the user for alias {0} : {1}")
     void aliasServiceUserError(final String alias, final String error);
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+             text = "Encountered an error attempting to determine the password for alias {0} : {1}")
     void aliasServicePasswordError(final String alias, final String error);
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "No user configured for Ambari service discovery.")
+             text = "No user configured for Ambari service discovery.")
     void aliasServiceUserNotFound();
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "No password configured for Ambari service discovery.")
+             text = "No password configured for Ambari service discovery.")
     void aliasServicePasswordNotFound();
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "Unexpected REST invocation response code for {0} : {1}")
+             text = "Unexpected REST invocation response code for {0} : {1}")
     void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
 
 
     @Message(level = MessageLevel.ERROR,
-            text = "REST invocation {0} yielded a response without any JSON.")
+             text = "REST invocation {0} yielded a response without any JSON.")
     void noJSON(final String url);
 
 
     @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation result: {0}")
+             text = "REST invocation result: {0}")
     void debugJSON(final String json);
 
+    @Message(level = MessageLevel.DEBUG,
+            text = "Loaded component configuration mappings: {0}")
+    void loadedComponentConfigMappings(final String mappings);
 
-    @Message(level = MessageLevel.INFO,
-            text = "Discovered: Service: {0}, Host: {1}")
+    @Message(level = MessageLevel.ERROR,
+             text = "Failed to load component configuration property mappings {0}: {1}")
+    void failedToLoadComponentConfigMappings(final String mappings,
+                                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+    @Message(level = MessageLevel.DEBUG,
+             text = "Discovered: Service: {0}, Host: {1}")
     void discoveredServiceHost(final String serviceName, final String hostName);
 
 
+    @Message(level = MessageLevel.DEBUG,
+             text = "Querying the cluster for the {0} configuration ({1}) property: {2}")
+    void lookingUpServiceConfigProperty(final String serviceName, final String configType, final String propertyName);
+
+
+    @Message(level = MessageLevel.DEBUG,
+             text = "Querying the cluster for the {0} component configuration property: {1}")
+    void lookingUpComponentConfigProperty(final String componentName, final String propertyName);
+
+
+    @Message(level = MessageLevel.DEBUG,
+             text = "Querying the cluster for the {0} component's hosts")
+    void lookingUpComponentHosts(final String componentName);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "Handling a derived service URL mapping property for the {0} service: type = {1}, name = {2}")
+    void handlingDerivedProperty(final String serviceName, final String propertyType, final String propertyName);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "Determined the service URL mapping property {0} value: {1}")
+    void determinedPropertyValue(final String propertyName, final String propertyValue);
 
 
 }
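
How these typed messages get emitted, as a minimal sketch (not from the commit): MessagesFactory proxies the annotated interface, and each call logs its template with the arguments substituted for {0}, {1}, and so on. The gateway-i18n module is assumed to be on the classpath.

    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
    import org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryMessages;

    public class DiscoveryLoggingSketch {
        private static final AmbariServiceDiscoveryMessages log =
            MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

        public static void main(String[] args) {
            log.lookingUpComponentHosts("NAMENODE");
            log.discoveredServiceHost("HDFS", "c6401.ambari.apache.org"); // hypothetical host
        }
    }
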

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index 0674642..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-class AmbariServiceURLCreator {
-
-    private static final String NAMENODE_SERVICE        = "NAMENODE";
-    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
-    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
-    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
-    private static final String OOZIE_SERVICE           = "OOZIE";
-    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
-    private static final String HIVE_SERVICE            = "HIVE";
-    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
-
-
-    /**
-     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
-     *
-     * @param cluster The cluster discovery results
-     * @param serviceName The name of a Hadoop service
-     *
-     * @return One or more endpoint URLs for the specified service.
-     */
-    public List<String> create(AmbariCluster cluster, String serviceName) {
-        List<String> result = null;
-
-        if (NAMENODE_SERVICE.equals(serviceName)) {
-            result = createNameNodeURL(cluster);
-        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
-            result = createJobTrackerURL(cluster);
-        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
-            result = createWebHDFSURL(cluster);
-        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
-            result = createWebHCatURL(cluster);
-        } else if (OOZIE_SERVICE.equals(serviceName)) {
-            result = createOozieURL(cluster);
-        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
-            result = createWebHBaseURL(cluster);
-        } else if (HIVE_SERVICE.equals(serviceName)) {
-            result = createHiveURL(cluster);
-        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
-            result = createResourceManagerURL(cluster);
-        }
-
-        return result;
-    }
-
-
-    private List<String> createNameNodeURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("NAMENODE");
-        if (comp != null) {
-            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createJobTrackerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
-        if (comp != null) {
-            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHDFSURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
-        if (sc != null) {
-            String address = sc.getProperties().get("dfs.namenode.http-address");
-            result.add("http://" + address + "/webhdfs");
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHCatURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
-        if (webhcat != null) {
-            String port = webhcat.getConfigProperty("templeton.port");
-            String host = webhcat.getHostNames().get(0);
-
-            result.add("http://" + host + ":" + port + "/templeton");
-        }
-        return result;
-    }
-
-
-    private List<String> createOozieURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
-        if (comp != null) {
-            result.add(comp.getConfigProperty("oozie.base.url"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHBaseURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
-        if (comp != null) {
-            for (String host : comp.getHostNames()) {
-                result.add("http://" + host + ":60080");
-            }
-        }
-
-        return result;
-    }
-
-
-    private List<String> createHiveURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
-        if (hive != null) {
-            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
-            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
-            String transport = hive.getConfigProperty("hive.server2.transport.mode");
-            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
-            String host = hive.getHostNames().get(0);
-
-            String scheme = null; // What is the scheme for the binary transport mode?
-            if ("http".equals(transport)) {
-                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
-            }
-
-            result.add(scheme + "://" + host + ":" + port + "/" + path);
-        }
-        return result;
-    }
-
-
-    private List<String> createResourceManagerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
-        if (resMan != null) {
-            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
-            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
-            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
-
-            result.add(scheme + "://" + webappAddress + "/ws");
-        }
-
-        return result;
-    }
-
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
new file mode 100644
index 0000000..d76a161
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+interface ConditionalValueHandler {
+
+    String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster);
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
new file mode 100644
index 0000000..642a676
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+class PropertyEqualsHandler implements ConditionalValueHandler {
+
+    private String serviceName                        = null;
+    private String propertyName                       = null;
+    private String propertyValue                      = null;
+    private ConditionalValueHandler affirmativeResult = null;
+    private ConditionalValueHandler negativeResult    = null;
+
+    PropertyEqualsHandler(String                  serviceName,
+                          String                  propertyName,
+                          String                  propertyValue,
+                          ConditionalValueHandler affirmativeResult,
+                          ConditionalValueHandler negativeResult) {
+        this.serviceName       = serviceName;
+        this.propertyName      = propertyName;
+        this.propertyValue     = propertyValue;
+        this.affirmativeResult = affirmativeResult;
+        this.negativeResult    = negativeResult;
+    }
+
+    @Override
+    public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+        String result = null;
+
+        ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, propertyName);
+        if (p != null) {
+            String value = getActualPropertyValue(cluster, p);
+            if (propertyValue.equals(value)) {
+                result = affirmativeResult.evaluate(config, cluster);
+            } else if (negativeResult != null) {
+                result = negativeResult.evaluate(config, cluster);
+            }
+
+            // Check if the result is a reference to a local derived property
+            ServiceURLPropertyConfig.Property derived = config.getConfigProperty(serviceName, result);
+            if (derived != null) {
+                result = getActualPropertyValue(cluster, derived);
+            }
+        }
+
+        return result;
+    }
+
+    private String getActualPropertyValue(AmbariCluster cluster, ServiceURLPropertyConfig.Property property) {
+        String value = null;
+        String propertyType = property.getType();
+        if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+            AmbariComponent component = cluster.getComponent(property.getComponent());
+            if (component != null) {
+                value = component.getConfigProperty(property.getValue());
+            }
+        } else if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+            value = cluster.getServiceConfiguration(property.getService(), property.getServiceConfig()).getProperties().get(property.getValue());
+        }
+        return value;
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
new file mode 100644
index 0000000..3330cc3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
@@ -0,0 +1,324 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.util.XmlUtils;
+import org.w3c.dom.Document;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Service URL pattern mapping configuration model.
+ */
+class ServiceURLPropertyConfig {
+
+    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    private static final String ATTR_NAME = "name";
+
+    private static XPathExpression SERVICE_URL_PATTERN_MAPPINGS;
+    private static XPathExpression URL_PATTERN;
+    private static XPathExpression PROPERTIES;
+    static {
+        XPath xpath = XPathFactory.newInstance().newXPath();
+        try {
+            SERVICE_URL_PATTERN_MAPPINGS = xpath.compile("/service-discovery-url-mappings/service");
+            URL_PATTERN                  = xpath.compile("url-pattern/text()");
+            PROPERTIES                   = xpath.compile("properties/property");
+        } catch (XPathExpressionException e) {
+            e.printStackTrace();
+        }
+    }
+
+    private static final String DEFAULT_SERVICE_URL_MAPPINGS = "ambari-service-discovery-url-mappings.xml";
+
+    private Map<String, URLPattern> urlPatterns = new HashMap<>();
+
+    private Map<String, Map<String, Property>> properties = new HashMap<>();
+
+
+    /**
+     * The default service URL pattern to property mapping configuration will be used.
+     */
+    ServiceURLPropertyConfig() {
+        this(ServiceURLPropertyConfig.class.getClassLoader().getResourceAsStream(DEFAULT_SERVICE_URL_MAPPINGS));
+    }
+
+    /**
+     * The specified file's service URL pattern to property mapping configuration will be used.
+     */
+    ServiceURLPropertyConfig(File mappingConfigurationFile) throws Exception {
+        this(new FileInputStream(mappingConfigurationFile));
+    }
+
+    /**
+     *
+     * @param source An InputStream for the XML content
+     */
+    ServiceURLPropertyConfig(InputStream source) {
+        // Parse the XML, and build the model
+        try {
+            Document doc = XmlUtils.readXml(source);
+
+            NodeList serviceNodes =
+                    (NodeList) SERVICE_URL_PATTERN_MAPPINGS.evaluate(doc, XPathConstants.NODESET);
+            for (int i=0; i < serviceNodes.getLength(); i++) {
+                Node serviceNode = serviceNodes.item(i);
+                String serviceName = serviceNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+                properties.put(serviceName, new HashMap<String, Property>());
+
+                Node urlPatternNode = (Node) URL_PATTERN.evaluate(serviceNode, XPathConstants.NODE);
+                if (urlPatternNode != null) {
+                    urlPatterns.put(serviceName, new URLPattern(urlPatternNode.getNodeValue()));
+                }
+
+                NodeList propertiesNode = (NodeList) PROPERTIES.evaluate(serviceNode, XPathConstants.NODESET);
+                if (propertiesNode != null) {
+                    processProperties(serviceName, propertiesNode);
+                }
+            }
+        } catch (Exception e) {
+            log.failedToLoadServiceDiscoveryConfiguration(e);
+        } finally {
+            try {
+                source.close();
+            } catch (IOException e) {
+                // Ignore
+            }
+        }
+    }
+
+    private void processProperties(String serviceName, NodeList propertyNodes) {
+        for (int i = 0; i < propertyNodes.getLength(); i++) {
+            Property p = Property.createProperty(serviceName, propertyNodes.item(i));
+            properties.get(serviceName).put(p.getName(), p);
+        }
+    }
+
+    URLPattern getURLPattern(String service) {
+        return urlPatterns.get(service);
+    }
+
+    Property getConfigProperty(String service, String property) {
+        return properties.get(service).get(property);
+    }
+
+    static class URLPattern {
+        String pattern;
+        List<String> placeholders = new ArrayList<>();
+
+        URLPattern(String pattern) {
+            this.pattern = pattern;
+
+            final Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
+            final Matcher matcher = regex.matcher(pattern);
+            while( matcher.find() ){
+                placeholders.add(matcher.group(1));
+            }
+        }
+
+        String get() {return pattern; }
+        List<String> getPlaceholders() {
+            return placeholders;
+        }
+    }
+
+    static class Property {
+        static final String TYPE_SERVICE   = "SERVICE";
+        static final String TYPE_COMPONENT = "COMPONENT";
+        static final String TYPE_DERIVED   = "DERIVED";
+
+        static final String PROP_COMP_HOSTNAME = "component.host.name";
+
+        static final String ATTR_NAME     = "name";
+        static final String ATTR_PROPERTY = "property";
+        static final String ATTR_VALUE    = "value";
+
+        static XPathExpression HOSTNAME;
+        static XPathExpression SERVICE_CONFIG;
+        static XPathExpression COMPONENT;
+        static XPathExpression CONFIG_PROPERTY;
+        static XPathExpression IF;
+        static XPathExpression THEN;
+        static XPathExpression ELSE;
+        static XPathExpression TEXT;
+        static {
+            XPath xpath = XPathFactory.newInstance().newXPath();
+            try {
+                HOSTNAME        = xpath.compile("hostname");
+                SERVICE_CONFIG  = xpath.compile("service-config");
+                COMPONENT       = xpath.compile("component");
+                CONFIG_PROPERTY = xpath.compile("config-property");
+                IF              = xpath.compile("if");
+                THEN            = xpath.compile("then");
+                ELSE            = xpath.compile("else");
+                TEXT            = xpath.compile("text()");
+            } catch (XPathExpressionException e) {
+                e.printStackTrace();
+            }
+        }
+
+
+        String type;
+        String name;
+        String component;
+        String service;
+        String serviceConfig;
+        String value;
+        ConditionalValueHandler conditionHandler = null;
+
+        private Property(String type,
+                         String propertyName,
+                         String component,
+                         String service,
+                         String configType,
+                         String value,
+                         ConditionalValueHandler pch) {
+            this.type = type;
+            this.name = propertyName;
+            this.service = service;
+            this.component = component;
+            this.serviceConfig = configType;
+            this.value = value;
+            conditionHandler = pch;
+        }
+
+        static Property createProperty(String serviceName, Node propertyNode) {
+            String propertyName = propertyNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+            String propertyType = null;
+            String serviceType = null;
+            String configType = null;
+            String componentType = null;
+            String value = null;
+            ConditionalValueHandler pch = null;
+
+            try {
+                Node hostNameNode = (Node) HOSTNAME.evaluate(propertyNode, XPathConstants.NODE);
+                if (hostNameNode != null) {
+                    value = PROP_COMP_HOSTNAME;
+                }
+
+                // Check for a service-config node
+                Node scNode = (Node) SERVICE_CONFIG.evaluate(propertyNode, XPathConstants.NODE);
+                if (scNode != null) {
+                    // Service config property
+                    propertyType = Property.TYPE_SERVICE;
+                    serviceType = scNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+                    Node scTextNode = (Node) TEXT.evaluate(scNode, XPathConstants.NODE);
+                    configType = scTextNode.getNodeValue();
+                } else { // If not service-config node, check for a component config node
+                    Node cNode = (Node) COMPONENT.evaluate(propertyNode, XPathConstants.NODE);
+                    if (cNode != null) {
+                        // Component config property
+                        propertyType = Property.TYPE_COMPONENT;
+                        // The component element's text node supplies both values here
+                        Node cTextNode = (Node) TEXT.evaluate(cNode, XPathConstants.NODE);
+                        configType = cTextNode.getNodeValue();
+                        componentType = cTextNode.getNodeValue();
+                    }
+                }
+
+                // Check for a config property node
+                Node cpNode = (Node) CONFIG_PROPERTY.evaluate(propertyNode, XPathConstants.NODE);
+                if (cpNode != null) {
+                    // Check for a condition element
+                    Node ifNode = (Node) IF.evaluate(cpNode, XPathConstants.NODE);
+                    if (ifNode != null) {
+                        propertyType = TYPE_DERIVED;
+                        pch = getConditionHandler(serviceName, ifNode);
+                    } else {
+                        Node cpTextNode = (Node) TEXT.evaluate(cpNode, XPathConstants.NODE);
+                        value = cpTextNode.getNodeValue();
+                    }
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+            // Create and return the property representation
+            return new Property(propertyType, propertyName, componentType, serviceType, configType, value, pch);
+        }
+
+        private static ConditionalValueHandler getConditionHandler(String serviceName, Node ifNode) throws Exception {
+            ConditionalValueHandler result = null;
+
+            if (ifNode != null) {
+                NamedNodeMap attrs = ifNode.getAttributes();
+                String comparisonPropName = attrs.getNamedItem(ATTR_PROPERTY).getNodeValue();
+                String comparisonValue = attrs.getNamedItem(ATTR_VALUE).getNodeValue();
+
+                ConditionalValueHandler affirmativeResult = null;
+                Node thenNode = (Node) THEN.evaluate(ifNode, XPathConstants.NODE);
+                if (thenNode != null) {
+                    Node subIfNode = (Node) IF.evaluate(thenNode, XPathConstants.NODE);
+                    if (subIfNode != null) {
+                        affirmativeResult = getConditionHandler(serviceName, subIfNode);
+                    } else {
+                        affirmativeResult = new SimpleValueHandler(thenNode.getFirstChild().getNodeValue());
+                    }
+                }
+
+                ConditionalValueHandler negativeResult = null;
+                Node elseNode = (Node) ELSE.evaluate(ifNode, XPathConstants.NODE);
+                if (elseNode != null) {
+                    Node subIfNode = (Node) IF.evaluate(elseNode, XPathConstants.NODE);
+                    if (subIfNode != null) {
+                        negativeResult = getConditionHandler(serviceName, subIfNode);
+                    } else {
+                        negativeResult = new SimpleValueHandler(elseNode.getFirstChild().getNodeValue());
+                    }
+                }
+
+                result = new PropertyEqualsHandler(serviceName,
+                        comparisonPropName,
+                        comparisonValue,
+                        affirmativeResult,
+                        negativeResult);
+            }
+
+            return result;
+        }
+
+        String getType() { return type; }
+        String getName() { return name; }
+        String getComponent() { return component; }
+        String getService() { return service; }
+        String getServiceConfig() { return serviceConfig; }
+        String getValue() {
+            return value;
+        }
+        ConditionalValueHandler getConditionHandler() { return conditionHandler; }
+    }
+}

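For reference, the placeholder extraction in URLPattern above is a simple non-greedy regex scan over the pattern string. A minimal standalone sketch of the same technique (the class name and sample pattern are illustrative only):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class PlaceholderDemo {
        public static void main(String[] args) {
            String urlPattern = "{SCHEME}://{WEBAPP_ADDRESS}/ws";

            // Same non-greedy expression used by URLPattern: matches {NAME} tokens
            Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
            Matcher matcher = regex.matcher(urlPattern);

            List<String> placeholders = new ArrayList<>();
            while (matcher.find()) {
                placeholders.add(matcher.group(1));
            }

            // Prints: [SCHEME, WEBAPP_ADDRESS]
            System.out.println(placeholders);
        }
    }
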
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
new file mode 100644
index 0000000..8e0cd75
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+class SimpleValueHandler implements ConditionalValueHandler {
+    private String value;
+
+    SimpleValueHandler(String value) {
+        this.value = value;
+    }
+
+    @Override
+    public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+        return value;
+    }
+}
+

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
new file mode 100644
index 0000000..a48b28c
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
@@ -0,0 +1,36 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+##########################################################################
+# Hadoop service component configuration mappings
+# Service component name = service component configuration type
+##########################################################################
+NAMENODE=hdfs-site
+RESOURCEMANAGER=yarn-site
+OOZIE_SERVER=oozie-site
+HIVE_SERVER=hive-site
+WEBHCAT_SERVER=webhcat-site
+HBASE_MASTER=hbase-site
+DRUID_COORDINATOR=druid-coordinator
+DRUID_BROKER=druid-broker
+DRUID_ROUTER=druid-router
+DRUID_OVERLORD=druid-overlord
+DRUID_SUPERSET=druid-superset
+ATLAS_SERVER=application-properties
+ZEPPELIN_MASTER=zeppelin-config
+#RANGER=TODO

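These mappings are ordinary java.util.Properties entries, one component name per configuration type. A minimal sketch of reading such a mapping, assuming the file is packaged on the classpath under the resource name added above:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Properties;

    public class ComponentConfigMappingDemo {
        public static void main(String[] args) throws IOException {
            Properties mapping = new Properties();
            try (InputStream in = ComponentConfigMappingDemo.class.getResourceAsStream(
                    "/ambari-service-discovery-component-config-mapping.properties")) {
                if (in != null) {
                    mapping.load(in);
                }
            }
            // e.g. NAMENODE -> hdfs-site (prints null if the resource is absent)
            System.out.println(mapping.getProperty("NAMENODE"));
        }
    }
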
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
new file mode 100644
index 0000000..8953b8d
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
@@ -0,0 +1,398 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!-- ==================================================================== -->
+<!--  Externalized mapping of Hadoop service URL patterns and the Ambari  -->
+<!--  ServiceDiscovery model properties. This configuration is used by    -->
+<!--  the Ambari ServiceDiscovery implementation to construct service     -->
+<!--  URLs suitable for use in a Knox topology file.                      -->
+<!-- ==================================================================== -->
+<service-discovery-url-mappings>
+
+    <service name="NAMENODE">
+        <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>
+        <properties>
+            <property name="DFS_NAMENODE_RPC_ADDRESS">
+                <component>NAMENODE</component>
+                <config-property>dfs.namenode.rpc-address</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="JOBTRACKER">
+        <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>
+        <properties>
+            <property name="YARN_RM_ADDRESS">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.resourcemanager.address</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="WEBHDFS">
+        <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>
+        <properties>
+            <property name="WEBHDFS_ADDRESS">
+                <service-config name="HDFS">hdfs-site</service-config>
+                <config-property>dfs.namenode.http-address</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="WEBHCAT">
+        <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>WEBHCAT_SERVER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>WEBHCAT_SERVER</component>
+                <config-property>templeton.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="OOZIE">
+        <url-pattern>{OOZIE_ADDRESS}</url-pattern>
+        <properties>
+            <property name="OOZIE_ADDRESS">
+                <component>OOZIE_SERVER</component>
+                <config-property>oozie.base.url</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="WEBHBASE">
+        <url-pattern>http://{HOST}:60080</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>HBASE_MASTER</component>
+                <hostname/>
+            </property>
+        </properties>
+    </service>
+
+    <service name="RESOURCEMANAGER">
+        <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>
+        <properties>
+            <property name="WEBAPP_HTTP_ADDRESS">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.resourcemanager.webapp.address</config-property>
+            </property>
+            <property name="WEBAPP_HTTPS_ADDRESS">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.resourcemanager.webapp.https.address</config-property>
+            </property>
+            <property name="HTTP_POLICY">
+                <component>RESOURCEMANAGER</component>
+                <config-property>yarn.http.policy</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="WEBAPP_ADDRESS">
+                <config-property>
+                    <if property="HTTP_POLICY" value="HTTPS_ONLY">
+                        <then>WEBAPP_HTTPS_ADDRESS</then>
+                        <else>WEBAPP_HTTP_ADDRESS</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="HIVE">
+        <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>HIVE_SERVER</component>
+                <hostname/>
+            </property>
+            <property name="USE_SSL">
+                <component>HIVE_SERVER</component>
+                <config-property>hive.server2.use.SSL</config-property>
+            </property>
+            <property name="PATH">
+                <component>HIVE_SERVER</component>
+                <config-property>hive.server2.thrift.http.path</config-property>
+            </property>
+            <property name="PORT">
+                <component>HIVE_SERVER</component>
+                <config-property>hive.server2.thrift.http.port</config-property>
+            </property>
+            <property name="SCHEME">
+                 <config-property>
+                    <if property="USE_SSL" value="true">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="DRUID-COORDINATOR">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_COORDINATOR</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_COORDINATOR</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="DRUID-BROKER">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_BROKER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_BROKER</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="DRUID-ROUTER">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_ROUTER</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_ROUTER</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="DRUID-OVERLORD">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_OVERLORD</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_OVERLORD</component>
+                <config-property>druid.port</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="SUPERSET">
+        <url-pattern>http://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>DRUID_SUPERSET</component>
+                <hostname/>
+            </property>
+            <property name="PORT">
+                <component>DRUID_SUPERSET</component>
+                <config-property>SUPERSET_WEBSERVER_PORT</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="ATLAS-API">
+        <url-pattern>{REST_ADDRESS}</url-pattern>
+        <properties>
+            <property name="REST_ADDRESS">
+                <component>ATLAS_SERVER</component>
+                <config-property>atlas.rest.address</config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="ATLAS">
+        <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>ATLAS_SERVER</component>
+                <hostname/>
+            </property>
+            <property name="TLS_ENABLED">
+                <component>ATLAS_SERVER</component>
+                <config-property>atlas.enableTLS</config-property>
+            </property>
+            <property name="HTTP_PORT">
+                <component>ATLAS_SERVER</component>
+                <config-property>atlas.server.http.port</config-property>
+            </property>
+            <property name="HTTPS_PORT">
+                <component>ATLAS_SERVER</component>
+                <config-property>atlas.server.https.port</config-property>
+            </property>
+            <property name="PORT">
+                <config-property>
+                    <if property="TLS_ENABLED" value="true">
+                        <then>HTTPS_PORT</then>
+                        <else>HTTP_PORT</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="TLS_ENABLED" value="true">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="ZEPPELIN">
+        <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>ZEPPELIN_MASTER</component>
+                <hostname/>
+            </property>
+            <property name="SSL">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.ssl</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="HTTPS_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.ssl.port</config-property>
+            </property>
+            <property name="HTTP_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.port</config-property>
+            </property>
+            <property name="PORT">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>HTTPS_PORT</then>
+                        <else>HTTP_PORT</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="ZEPPELINUI">
+        <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>ZEPPELIN_MASTER</component>
+                <hostname/>
+            </property>
+            <property name="SSL">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.ssl</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>https</then>
+                        <else>http</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="HTTPS_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.ssl.port</config-property>
+            </property>
+            <property name="HTTP_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.port</config-property>
+            </property>
+            <property name="PORT">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>HTTPS_PORT</then>
+                        <else>HTTP_PORT</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+    <service name="ZEPPELINWS">
+        <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+        <properties>
+            <property name="HOST">
+                <component>ZEPPELIN_MASTER</component>
+                <hostname/>
+            </property>
+            <property name="SSL">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.ssl</config-property>
+            </property>
+            <property name="HTTPS_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.ssl.port</config-property>
+            </property>
+            <property name="HTTP_PORT">
+                <component>ZEPPELIN_MASTER</component>
+                <config-property>zeppelin.server.port</config-property>
+            </property>
+            <property name="SCHEME">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>wss</then>
+                        <else>ws</else>
+                    </if>
+                </config-property>
+            </property>
+            <property name="PORT">
+                <config-property>
+                    <if property="SSL" value="true">
+                        <then>HTTPS_PORT</then>
+                        <else>HTTP_PORT</else>
+                    </if>
+                </config-property>
+            </property>
+        </properties>
+    </service>
+
+
+<!-- TODO:
+    <service name="YARNUI">
+    </service>
+
+    <service name="RANGER">
+    </service>
+
+    <service name="RANGERUI">
+    </service>
+-->
+
+</service-discovery-url-mappings>

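To make the conditional mappings concrete: in the RESOURCEMANAGER entry above, SCHEME and WEBAPP_ADDRESS are both derived from the yarn.http.policy value. A minimal sketch of that resolution, assuming the referenced config properties have already been looked up (this mirrors the behavior of the <if> elements; it is not the Knox implementation itself):

    import java.util.HashMap;
    import java.util.Map;

    public class ResourceManagerUrlDemo {
        public static void main(String[] args) {
            // Values as they might be discovered from yarn-site via Ambari
            Map<String, String> props = new HashMap<>();
            props.put("HTTP_POLICY", "HTTPS_ONLY");
            props.put("WEBAPP_HTTP_ADDRESS", "c6401.ambari.apache.org:8088");
            props.put("WEBAPP_HTTPS_ADDRESS", "c6401.ambari.apache.org:8090");

            // <if property="HTTP_POLICY" value="HTTPS_ONLY"> ... </if>
            boolean httpsOnly = "HTTPS_ONLY".equals(props.get("HTTP_POLICY"));
            String scheme = httpsOnly ? "https" : "http";
            String webappAddress = httpsOnly ? props.get("WEBAPP_HTTPS_ADDRESS")
                                             : props.get("WEBAPP_HTTP_ADDRESS");

            // {SCHEME}://{WEBAPP_ADDRESS}/ws
            System.out.println(scheme + "://" + webappAddress + "/ws");
        }
    }
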

[17/37] knox git commit: KNOX-1072 - Add Client Cert Required Capability to KnoxToken

Posted by lm...@apache.org.
KNOX-1072 - Add Client Cert Required Capability to KnoxToken

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7b4755d5
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7b4755d5
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7b4755d5

Branch: refs/heads/KNOX-1049
Commit: 7b4755d57c8998d1aed62c100124b8a94a3427db
Parents: 145ed5d
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Sep 28 19:27:38 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Sep 28 19:27:54 2017 -0400

----------------------------------------------------------------------
 .../service/knoxtoken/TokenResource.java        |  35 +++++
 .../knoxtoken/TokenServiceResourceTest.java     | 144 +++++++++++++++++++
 2 files changed, 179 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/7b4755d5/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index 43dd526..9d8bae3 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.service.knoxtoken;
 
 import java.io.IOException;
 import java.security.Principal;
+import java.security.cert.X509Certificate;
 import java.util.ArrayList;
 import java.util.Map;
 import java.util.HashMap;
@@ -54,12 +55,16 @@ public class TokenResource {
   private static final String TOKEN_AUDIENCES_PARAM = "knox.token.audiences";
   private static final String TOKEN_TARGET_URL = "knox.token.target.url";
   private static final String TOKEN_CLIENT_DATA = "knox.token.client.data";
+  private static final String TOKEN_CLIENT_CERT_REQUIRED = "knox.token.client.cert.required";
+  private static final String TOKEN_ALLOWED_PRINCIPALS = "knox.token.allowed.principals";
   static final String RESOURCE_PATH = "knoxtoken/api/v1/token";
   private static TokenServiceMessages log = MessagesFactory.get( TokenServiceMessages.class );
   private long tokenTTL = 30000l;
   private List<String> targetAudiences = new ArrayList<>();
   private String tokenTargetUrl = null;
   private Map<String,Object> tokenClientDataMap = null;
+  private ArrayList<String> allowedDNs = new ArrayList<>();
+  private boolean clientCertRequired = false;
 
   @Context
   HttpServletRequest request;
@@ -81,6 +86,17 @@ public class TokenResource {
       }
     }
 
+    String clientCert = context.getInitParameter(TOKEN_CLIENT_CERT_REQUIRED);
+    clientCertRequired = "true".equals(clientCert);
+
+    String principals = context.getInitParameter(TOKEN_ALLOWED_PRINCIPALS);
+    if (principals != null) {
+      String[] dns = principals.split(";");
+      for (int i = 0; i < dns.length; i++) {
+        allowedDNs.add(dns[i]);
+      }
+    }
+
     String ttl = context.getInitParameter(TOKEN_TTL_PARAM);
     if (ttl != null) {
       try {
@@ -113,7 +129,26 @@ public class TokenResource {
     return getAuthenticationToken();
   }
 
+  private X509Certificate extractCertificate(HttpServletRequest req) {
+    X509Certificate[] certs = (X509Certificate[]) req.getAttribute("javax.servlet.request.X509Certificate");
+    if (null != certs && certs.length > 0) {
+      return certs[0];
+    }
+    return null;
+  }
+
   private Response getAuthenticationToken() {
+    if (clientCertRequired) {
+      X509Certificate cert = extractCertificate(request);
+      if (cert != null) {
+        if (!allowedDNs.contains(cert.getSubjectDN().getName())) {
+          return Response.status(403).entity("{ \"error\": \"Unable to get token - untrusted client cert.\" }").build();
+        }
+      }
+      else {
+        return Response.status(403).entity("{ \"error\": \"Unable to get token - client cert required.\" }").build();
+      }
+    }
     GatewayServices services = (GatewayServices) request.getServletContext()
             .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
 

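Note that the allow-list check above compares cert.getSubjectDN().getName() against the configured strings verbatim, so each knox.token.allowed.principals entry must match the certificate's DN formatting exactly. A minimal sketch using the standard X500Principal API to show how DN renderings of the same principal can differ (the DN value is just an example):

    import javax.security.auth.x500.X500Principal;

    public class DnMatchDemo {
        public static void main(String[] args) {
            X500Principal p = new X500Principal(
                "CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");

            // RFC 2253 form: CN=localhost,OU=Test,O=Hadoop,L=Test,ST=Test,C=US
            System.out.println(p.getName());

            // Canonical form additionally normalizes case and spacing
            System.out.println(p.getName(X500Principal.CANONICAL));

            // A plain string comparison is sensitive to such formatting differences,
            // so the configured principal must be copied exactly as the cert reports it.
        }
    }
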
http://git-wip-us.apache.org/repos/asf/knox/blob/7b4755d5/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index bddd13d..b4e51e6 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.gateway.service.knoxtoken;
 
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
 import org.apache.hadoop.gateway.service.knoxtoken.TokenResource;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
@@ -49,6 +50,7 @@ import java.security.KeyPair;
 import java.security.KeyPairGenerator;
 import java.security.NoSuchAlgorithmException;
 import java.security.Principal;
+import java.security.cert.X509Certificate;
 import java.security.interfaces.RSAPrivateKey;
 import java.security.interfaces.RSAPublicKey;
 import java.util.ArrayList;
@@ -203,6 +205,148 @@ public class TokenServiceResourceTest {
     assertTrue(audiences.contains("recipient2"));
   }
 
+  @Test
+  public void testValidClientCert() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+    EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+    X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
+    EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
+    ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
+    certArrayList.add(trustedCertMock);
+    X509Certificate[] certs = {};
+    EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    StringWriter writer = new StringWriter();
+    PrintWriter printWriter = new PrintWriter(writer);
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+    EasyMock.replay(principal, services, context, request, response, trustedCertMock);
+
+    TokenResource tr = new TokenResource();
+    tr.request = request;
+    tr.response = response;
+    tr.context = context;
+    tr.init();
+
+    // Issue a token
+    Response retResponse = tr.doGet();
+
+    assertEquals(200, retResponse.getStatus());
+
+    // Parse the response
+    String retString = writer.toString();
+    String accessToken = getTagValue(retString, "access_token");
+    assertNotNull(accessToken);
+    String expiry = getTagValue(retString, "expires_in");
+    assertNotNull(expiry);
+
+    // Verify the token
+    JWTToken parsedToken = new JWTToken(accessToken);
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+  }
+
+  @Test
+  public void testValidClientCertWrongUser() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+    EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+    X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
+    EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
+    ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
+    certArrayList.add(trustedCertMock);
+    X509Certificate[] certs = {};
+    EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    StringWriter writer = new StringWriter();
+    PrintWriter printWriter = new PrintWriter(writer);
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+    EasyMock.replay(principal, services, context, request, response, trustedCertMock);
+
+    TokenResource tr = new TokenResource();
+    tr.request = request;
+    tr.response = response;
+    tr.context = context;
+    tr.init();
+
+    // Issue a token
+    Response retResponse = tr.doGet();
+
+    assertEquals(403, retResponse.getStatus());
+  }
+
+  @Test
+  public void testMissingClientCert() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+    EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+    EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(null).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    StringWriter writer = new StringWriter();
+    PrintWriter printWriter = new PrintWriter(writer);
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+    EasyMock.replay(principal, services, context, request, response);
+
+    TokenResource tr = new TokenResource();
+    tr.request = request;
+    tr.response = response;
+    tr.context = context;
+    tr.init();
+
+    // Issue a token
+    Response retResponse = tr.doGet();
+
+    assertEquals(403, retResponse.getStatus());
+  }
+
   private String getTagValue(String token, String tagName) {
     String searchString = tagName + "\":";
     String value = token.substring(token.indexOf(searchString) + searchString.length());


[18/37] knox git commit: KNOX-1074 - Knox Proxy - Workflow Manager view fails to load when using ambari through KNOX (Venkatasairam Lanka via lmccay)

Posted by lm...@apache.org.
KNOX-1074 - Knox Proxy - Workflow Manager view fails to load when using ambari through KNOX (Venkatasairam Lanka via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c5aedf40
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c5aedf40
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c5aedf40

Branch: refs/heads/KNOX-1049
Commit: c5aedf40f21ffc9f884330e576ff8f680fd5f368
Parents: 7b4755d
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sat Sep 30 20:51:05 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sat Sep 30 20:51:05 2017 -0400

----------------------------------------------------------------------
 .../src/main/resources/services/ambariui/2.2.0/service.xml      | 5 +++++
 1 file changed, 5 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c5aedf40/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
index ab4ab2b..0ed0a31 100644
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
@@ -80,6 +80,11 @@
             <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
         </route>
 
+        <!-- Wfmanager view -->
+        <route path="/ambari/views/WORKFLOW_MANAGER/*/*/">
+            <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/html" to="response.body"/>
+        </route>
+
         <!-- SmartSense view -->
         <route path="/ambari/views/SMARTSENSE/**/assets/hstapp-*.js">
             <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>


[34/37] knox git commit: KNOX-1062 - Service-Level Parameter Support in Service Discovery and Topology Generation (Phil Zampino via lmccay)

Posted by lm...@apache.org.
KNOX-1062 - Service-Level Parameter Support in Service Discovery and Topology Generation (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/0e13dc72
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/0e13dc72
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/0e13dc72

Branch: refs/heads/KNOX-1049
Commit: 0e13dc72e7719163ffca638a7f809dce3f9cf54c
Parents: 8ecac92
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 19 15:45:37 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 19 15:45:37 2017 -0400

----------------------------------------------------------------------
 .../discovery/ambari/AmbariCluster.java         |   2 +-
 .../topology/simple/SimpleDescriptor.java       |   4 +-
 .../simple/SimpleDescriptorHandler.java         |  43 +++-
 .../topology/simple/SimpleDescriptorImpl.java   |  12 +
 .../simple/SimpleDescriptorFactoryTest.java     | 230 +++++++++++++++++--
 .../simple/SimpleDescriptorHandlerTest.java     |  79 ++++++-
 6 files changed, 338 insertions(+), 32 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
index eb84433..c841d9c 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -42,7 +42,7 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
 
     void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
         if (!serviceConfigurations.keySet().contains(serviceName)) {
-            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+            serviceConfigurations.put(serviceName, new HashMap<>());
         }
         serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
index aa28469..7072965 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
@@ -17,6 +17,7 @@
 package org.apache.hadoop.gateway.topology.simple;
 
 import java.util.List;
+import java.util.Map;
 
 public interface SimpleDescriptor {
 
@@ -40,7 +41,8 @@ public interface SimpleDescriptor {
     interface Service {
         String getName();
 
+        Map<String, String> getParams();
+
         List<String> getURLs();
     }
-
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index 521b5b4..70c9539 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -80,7 +80,10 @@ public class SimpleDescriptorHandler {
         ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
         ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
 
-        Map<String, List<String>> serviceURLs = new HashMap<>();
+        List<String> validServiceNames = new ArrayList<>();
+
+        Map<String, Map<String, String>> serviceParams = new HashMap<>();
+        Map<String, List<String>>        serviceURLs   = new HashMap<>();
 
         if (cluster != null) {
             for (SimpleDescriptor.Service descService : desc.getServices()) {
@@ -100,6 +103,10 @@ public class SimpleDescriptorHandler {
                             validURLs.add(descServiceURL);
                         }
                     }
+
+                    if (!validURLs.isEmpty()) {
+                        validServiceNames.add(serviceName);
+                    }
                 }
 
                 // If there is at least one valid URL associated with the service, then add it to the map
@@ -108,6 +115,14 @@ public class SimpleDescriptorHandler {
                 } else {
                     log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
                 }
+
+                // Service params
+                if (descService.getParams() != null) {
+                    serviceParams.put(serviceName, descService.getParams());
+                    if (!validServiceNames.contains(serviceName)) {
+                        validServiceNames.add(serviceName);
+                    }
+                }
             }
         } else {
             log.failedToDiscoverClusterServices(desc.getClusterName());
@@ -115,7 +130,7 @@ public class SimpleDescriptorHandler {
 
         BufferedWriter fw = null;
         topologyDescriptor = null;
-        File providerConfig = null;
+        File providerConfig;
         try {
             // Verify that the referenced provider configuration exists before attempting to reading it
             providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
@@ -147,16 +162,33 @@ public class SimpleDescriptorHandler {
             policyReader.close();
 
             // Sort the service names to write the services alphabetically
-            List<String> serviceNames = new ArrayList<>(serviceURLs.keySet());
+            List<String> serviceNames = new ArrayList<>(validServiceNames);
             Collections.sort(serviceNames);
 
             // Write the service declarations
             for (String serviceName : serviceNames) {
                 fw.write("    <service>\n");
                 fw.write("        <role>" + serviceName + "</role>\n");
-                for (String url : serviceURLs.get(serviceName)) {
-                    fw.write("        <url>" + url + "</url>\n");
+
+                // URLs
+                List<String> urls = serviceURLs.get(serviceName);
+                if (urls != null) {
+                    for (String url : urls) {
+                        fw.write("        <url>" + url + "</url>\n");
+                    }
                 }
+
+                // Params
+                Map<String, String> svcParams = serviceParams.get(serviceName);
+                if (svcParams != null) {
+                    for (String paramName : svcParams.keySet()) {
+                        fw.write("        <param>\n");
+                        fw.write("            <name>" + paramName + "</name>\n");
+                        fw.write("            <value>" + svcParams.get(paramName) + "</value>\n");
+                        fw.write("        </param>\n");
+                    }
+                }
+
                 fw.write("    </service>\n");
             }
 
@@ -195,6 +227,7 @@ public class SimpleDescriptorHandler {
         return result;
     }
 
+
     private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
         File providerConfig;
 

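The writer loop above emits one <service> element per valid service, including a <param> child per entry in that service's params map. A minimal sketch of the same emission logic in isolation (the service name and parameter values are stand-ins):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ServiceElementDemo {
        public static void main(String[] args) {
            String serviceName = "KNOXTOKEN";
            Map<String, String> svcParams = new LinkedHashMap<>();
            svcParams.put("knox.token.ttl", "36000000");
            svcParams.put("knox.token.audiences", "tokenbased");

            // Build the service declaration the same way the handler writes it
            StringBuilder sb = new StringBuilder();
            sb.append("    <service>\n");
            sb.append("        <role>").append(serviceName).append("</role>\n");
            for (Map.Entry<String, String> p : svcParams.entrySet()) {
                sb.append("        <param>\n");
                sb.append("            <name>").append(p.getKey()).append("</name>\n");
                sb.append("            <value>").append(p.getValue()).append("</value>\n");
                sb.append("        </param>\n");
            }
            sb.append("    </service>\n");
            System.out.print(sb);
        }
    }
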
http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
index 32ceba9..bb430a1 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorImpl.java
@@ -21,6 +21,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 class SimpleDescriptorImpl implements SimpleDescriptor {
 
@@ -94,7 +95,13 @@ class SimpleDescriptorImpl implements SimpleDescriptor {
     }
 
     public static class ServiceImpl implements Service {
+        @JsonProperty("name")
         private String name;
+
+        @JsonProperty("params")
+        private Map<String, String> params;
+
+        @JsonProperty("urls")
         private List<String> urls;
 
         @Override
@@ -103,6 +110,11 @@ class SimpleDescriptorImpl implements SimpleDescriptor {
         }
 
         @Override
+        public Map<String, String> getParams() {
+            return params;
+        }
+
+        @Override
         public List<String> getURLs() {
             return urls;
         }

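With the @JsonProperty annotations above, a descriptor service can carry a params map alongside its urls. A minimal, self-contained Jackson sketch of the shape being parsed (the local Svc class is illustrative, not the Knox SimpleDescriptorImpl):

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;
    import java.util.Map;

    public class ServiceParamsDemo {
        public static class Svc {
            @JsonProperty("name")   public String name;
            @JsonProperty("params") public Map<String, String> params;
            @JsonProperty("urls")   public List<String> urls;
        }

        public static void main(String[] args) throws Exception {
            String json = "{\"name\":\"KNOXTOKEN\"," +
                          "\"params\":{\"knox.token.ttl\":\"36000000\"}," +
                          "\"urls\":null}";
            Svc svc = new ObjectMapper().readValue(json, Svc.class);
            // Prints: KNOXTOKEN -> {knox.token.ttl=36000000}
            System.out.println(svc.name + " -> " + svc.params);
        }
    }
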
http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
index 3dac66a..2ba8758 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@ -64,6 +64,68 @@ public class SimpleDescriptorFactoryTest {
     }
 
     @Test
+    public void testParseJSONSimpleDescriptorWithServiceParams() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "admin";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
+        services.put("KNOXSSO", null);
+        services.put("KNOXTOKEN", null);
+        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
+
+        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
+        Map<String, String> knoxSSOParams = new HashMap<>();
+        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
+        knoxSSOParams.put("knoxsso.token.ttl", "100000");
+        serviceParams.put("KNOXSSO", knoxSSOParams);
+
+        Map<String, String> knoxTokenParams = new HashMap<>();
+        knoxTokenParams.put("knox.token.ttl", "36000000");
+        knoxTokenParams.put("knox.token.audiences", "tokenbased");
+        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
+        serviceParams.put("KNOXTOKEN", knoxTokenParams);
+
+        Map<String, String> customRoleParams = new HashMap<>();
+        customRoleParams.put("custom.param.1", "value1");
+        customRoleParams.put("custom.param.2", "value2");
+        serviceParams.put("CustomRole", customRoleParams);
+
+        String fileName = "test-topology.json";
+        File testJSON = null;
+        try {
+            testJSON = writeJSON(fileName,
+                                 discoveryType,
+                                 discoveryAddress,
+                                 discoveryUser,
+                                 providerConfig,
+                                 clusterName,
+                                 services,
+                                 serviceParams);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services, serviceParams);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            if (testJSON != null) {
+                try {
+                    testJSON.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+    @Test
     public void testParseYAMLSimpleDescriptor() throws Exception {
 
         final String   discoveryType    = "AMBARI";
@@ -99,12 +161,79 @@ public class SimpleDescriptorFactoryTest {
     }
 
 
-    private void validateSimpleDescriptor(SimpleDescriptor    sd,
-                                          String              discoveryType,
-                                          String              discoveryAddress,
-                                          String              providerConfig,
-                                          String              clusterName,
+    @Test
+    public void testParseYAMLSimpleDescriptorWithServiceParams() throws Exception {
+
+        final String   discoveryType    = "AMBARI";
+        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
+        final String   discoveryUser    = "joeblow";
+        final String   providerConfig   = "ambari-cluster-policy.xml";
+        final String   clusterName      = "myCluster";
+
+        final Map<String, List<String>> services = new HashMap<>();
+        services.put("NODEMANAGER", null);
+        services.put("JOBTRACKER", null);
+        services.put("RESOURCEMANAGER", null);
+        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
+        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+        services.put("KNOXSSO", null);
+        services.put("KNOXTOKEN", null);
+        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
+
+        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
+        Map<String, String> knoxSSOParams = new HashMap<>();
+        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
+        knoxSSOParams.put("knoxsso.token.ttl", "100000");
+        serviceParams.put("KNOXSSO", knoxSSOParams);
+
+        Map<String, String> knoxTokenParams = new HashMap<>();
+        knoxTokenParams.put("knox.token.ttl", "36000000");
+        knoxTokenParams.put("knox.token.audiences", "tokenbased");
+        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
+        serviceParams.put("KNOXTOKEN", knoxTokenParams);
+
+        Map<String, String> customRoleParams = new HashMap<>();
+        customRoleParams.put("custom.param.1", "value1");
+        customRoleParams.put("custom.param.2", "value2");
+        serviceParams.put("CustomRole", customRoleParams);
+
+        String fileName = "test-topology.yml";
+        File testYAML = null;
+        try {
+            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, serviceParams);
+            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
+            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services, serviceParams);
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            if (testYAML != null) {
+                try {
+                    testYAML.delete();
+                } catch (Exception e) {
+                    // Ignore
+                }
+            }
+        }
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor          sd,
+                                          String                    discoveryType,
+                                          String                    discoveryAddress,
+                                          String                    providerConfig,
+                                          String                    clusterName,
                                           Map<String, List<String>> expectedServices) {
+        validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, expectedServices, null);
+    }
+
+
+    private void validateSimpleDescriptor(SimpleDescriptor                 sd,
+                                          String                           discoveryType,
+                                          String                           discoveryAddress,
+                                          String                           providerConfig,
+                                          String                           clusterName,
+                                          Map<String, List<String>>        expectedServices,
+                                          Map<String, Map<String, String>> expectedServiceParameters) {
         assertNotNull(sd);
         assertEquals(discoveryType, sd.getDiscoveryType());
         assertEquals(discoveryAddress, sd.getDiscoveryAddress());
@@ -118,6 +247,25 @@ public class SimpleDescriptorFactoryTest {
         for (SimpleDescriptor.Service actualService : actualServices) {
             assertTrue(expectedServices.containsKey(actualService.getName()));
             assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
+
+            // Validate service parameters
+            if (expectedServiceParameters != null) {
+                if (expectedServiceParameters.containsKey(actualService.getName())) {
+                    Map<String, String> expectedParams = expectedServiceParameters.get(actualService.getName());
+
+                    Map<String, String> actualServiceParams = actualService.getParams();
+                    assertNotNull(actualServiceParams);
+
+                    // Validate the size of the service parameter set
+                    assertEquals(expectedParams.size(), actualServiceParams.size());
+
+                    // Validate the parameter contents
+                    for (String paramName : actualServiceParams.keySet()) {
+                        assertTrue(expectedParams.containsKey(paramName));
+                        assertEquals(expectedParams.get(paramName), actualServiceParams.get(paramName));
+                    }
+                }
+            }
         }
     }
 
@@ -141,6 +289,17 @@ public class SimpleDescriptorFactoryTest {
                            String providerConfig,
                            String clusterName,
                            Map<String, List<String>> services) throws Exception {
+        return writeJSON(path, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, null);
+    }
+
+    private File writeJSON(String path,
+                           String discoveryType,
+                           String discoveryAddress,
+                           String discoveryUser,
+                           String providerConfig,
+                           String clusterName,
+                           Map<String, List<String>> services,
+                           Map<String, Map<String, String>> serviceParams) throws Exception {
         File f = new File(path);
 
         Writer fw = new FileWriter(f);
@@ -155,9 +314,27 @@ public class SimpleDescriptorFactoryTest {
         int i = 0;
         for (String name : services.keySet()) {
             fw.write("{\"name\":\"" + name + "\"");
+
+            // Service params
+            if (serviceParams != null && !serviceParams.isEmpty()) {
+                Map<String, String> params = serviceParams.get(name);
+                if (params != null && !params.isEmpty()) {
+                    fw.write(",\n\"params\":{\n");
+                    Iterator<String> paramNames = params.keySet().iterator();
+                    while (paramNames.hasNext()) {
+                        String paramName = paramNames.next();
+                        String paramValue = params.get(paramName);
+                        fw.write("\"" + paramName + "\":\"" + paramValue + "\"");
+                        fw.write(paramNames.hasNext() ? ",\n" : "");
+                    }
+                    fw.write("\n}");
+                }
+            }
+
+            // Service URLs
             List<String> urls = services.get(name);
             if (urls != null) {
-                fw.write(", \"urls\":[");
+                fw.write(",\n\"urls\":[");
                 Iterator<String> urlIter = urls.iterator();
                 while (urlIter.hasNext()) {
                     fw.write("\"" + urlIter.next() + "\"");
@@ -165,8 +342,9 @@ public class SimpleDescriptorFactoryTest {
                         fw.write(", ");
                     }
                 }
-                fw.write("]");
+                fw.write("]\n");
             }
+
             fw.write("}");
             if (i++ < services.size() - 1) {
                 fw.write(",");
@@ -181,13 +359,26 @@ public class SimpleDescriptorFactoryTest {
         return f;
     }
 
-    private File writeYAML(String path,
-                           String discoveryType,
-                           String discoveryAddress,
-                           String discoveryUser,
-                           String providerConfig,
-                           String clusterName,
+
+    private File writeYAML(String                    path,
+                           String                    discoveryType,
+                           String                    discoveryAddress,
+                           String                    discoveryUser,
+                           String                    providerConfig,
+                           String                    clusterName,
                            Map<String, List<String>> services) throws Exception {
+        return writeYAML(path, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, null);
+    }
+
+
+    private File writeYAML(String                           path,
+                           String                           discoveryType,
+                           String                           discoveryAddress,
+                           String                           discoveryUser,
+                           String                           providerConfig,
+                           String                           clusterName,
+                           Map<String, List<String>>        services,
+                           Map<String, Map<String, String>> serviceParams) throws Exception {
         File f = new File(path);
 
         Writer fw = new FileWriter(f);
@@ -200,6 +391,19 @@ public class SimpleDescriptorFactoryTest {
         fw.write("services:\n");
         for (String name : services.keySet()) {
             fw.write("    - name: " + name + "\n");
+
+            // Service params
+            if (serviceParams != null && !serviceParams.isEmpty()) {
+                if (serviceParams.containsKey(name)) {
+                    Map<String, String> params = serviceParams.get(name);
+                    fw.write("      params:\n");
+                    for (String paramName : params.keySet()) {
+                        fw.write("            " + paramName + ": " + params.get(paramName) + "\n");
+                    }
+                }
+            }
+
+            // Service URLs
             List<String> urls = services.get(name);
             if (urls != null) {
                 fw.write("      urls:\n");
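
For reference, a service entry produced by this JSON test writer looks roughly as follows (reconstructed from the writer logic above; the params come from a HashMap, so their order may vary):

    {"name":"CustomRole",
    "params":{
    "custom.param.1":"value1",
    "custom.param.2":"value2"
    },
    "urls":["http://c6402.ambari.apache.org:1234"]
    }

and the YAML writer emits entries of the form:

    services:
        - name: CustomRole
          params:
                custom.param.1: value1
                custom.param.2: value2
          urls:

(followed by one list item per URL, in the existing writer's format).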

http://git-wip-us.apache.org/repos/asf/knox/blob/0e13dc72/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index f79ef23..a1fda1c 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.topology.simple;
 
 import java.io.ByteArrayInputStream;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 
@@ -131,15 +132,37 @@ public class SimpleDescriptorHandlerTest {
     /**
      * KNOX-1006
      *
-     * N.B. This test depends on the DummyServiceDiscovery extension being configured:
-     *             org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
      */
     @Test
     public void testSimpleDescriptorHandler() throws Exception {
 
-        final String type = "DUMMY";
-        final String address = "http://c6401.ambari.apache.org:8080";
+        final String type = "PROPERTIES_FILE";
         final String clusterName = "dummy";
+
+        // Create a properties file to be the source of service discovery details for this test
+        final File discoveryConfig = File.createTempFile(getClass().getName() + "_discovery-config", ".properties");
+
+        final String address = discoveryConfig.getAbsolutePath();
+
+        final Properties DISCOVERY_PROPERTIES = new Properties();
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".name", clusterName);
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE", "hdfs://namenodehost:8020");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".JOBTRACKER", "rpc://jobtrackerhostname:8050");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHDFS", "http://webhdfshost:1234");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHCAT", "http://webhcathost:50111/templeton");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".OOZIE", "http://ooziehost:11000/oozie");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE", "http://webhbasehost:1234");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE", "http://hivehostname:10001/clipath");
+        DISCOVERY_PROPERTIES.setProperty(clusterName + ".RESOURCEMANAGER", "http://remanhost:8088/ws");
+
+        try {
+            DISCOVERY_PROPERTIES.store(new FileOutputStream(discoveryConfig), null);
+        } catch (FileNotFoundException e) {
+            fail(e.getMessage());
+        }
+
         final Map<String, List<String>> serviceURLs = new HashMap<>();
         serviceURLs.put("NAMENODE", null);
         serviceURLs.put("JOBTRACKER", null);
@@ -150,13 +173,21 @@ public class SimpleDescriptorHandlerTest {
         serviceURLs.put("HIVE", null);
         serviceURLs.put("RESOURCEMANAGER", null);
         serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
+        serviceURLs.put("KNOXSSO", null);
 
         // Write the externalized provider config to a temp file
-        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+        File providerConfig = new File(System.getProperty("java.io.tmpdir"), "ambari-cluster-policy.xml");
+        FileUtils.write(providerConfig, TEST_PROVIDER_CONFIG);
 
         File topologyFile = null;
         try {
-            File destDir = (new File(".")).getCanonicalFile();
+            File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
+
+            Map<String, Map<String, String>> serviceParameters = new HashMap<>();
+            Map<String, String> knoxssoParams = new HashMap<>();
+            knoxssoParams.put("knoxsso.cookie.secure.only", "true");
+            knoxssoParams.put("knoxsso.token.ttl", "100000");
+            serviceParameters.put("KNOXSSO", knoxssoParams);
 
             // Mock out the simple descriptor
             SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
@@ -171,6 +202,7 @@ public class SimpleDescriptorHandlerTest {
                 SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
                 EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
                 EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.expect(svc.getParams()).andReturn(serviceParameters.get(serviceName)).anyTimes();
                 EasyMock.replay(svc);
                 serviceMocks.add(svc);
             }
@@ -211,28 +243,51 @@ public class SimpleDescriptorHandlerTest {
                         (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
             for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
                 Node serviceNode = serviceNodes.item(serviceNodeIndex);
+
+                // Validate the role
                 Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
                 assertNotNull(roleNode);
                 String role = roleNode.getNodeValue();
+
+                // Validate the URLs
                 NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
                 for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
                     Node urlNode = urlNodes.item(urlNodeIndex);
                     assertNotNull(urlNode);
                     String url = urlNode.getNodeValue();
-                    assertNotNull("Every declared service should have a URL.", url);
-                    if (!topologyServiceURLs.containsKey(role)) {
-                        topologyServiceURLs.put(role, new ArrayList<String>());
+
+                    // If the service should have a URL (some don't require it)
+                    if (serviceURLs.containsKey(role)) {
+                        assertNotNull("Declared service should have a URL.", url);
+                        if (!topologyServiceURLs.containsKey(role)) {
+                            topologyServiceURLs.put(role, new ArrayList<>());
+                        }
+                        topologyServiceURLs.get(role).add(url); // Add it for validation later
+                    }
+                }
+
+                // If params were declared in the descriptor, then validate them in the resulting topology file
+                Map<String, String> params = serviceParameters.get(role);
+                if (params != null) {
+                    NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
+                    for (int paramNodeIndex = 0; paramNodeIndex < paramNodes.getLength(); paramNodeIndex++) {
+                        Node paramNode = paramNodes.item(paramNodeIndex);
+                        String paramName = (String) xpath.compile("name/text()").evaluate(paramNode, XPathConstants.STRING);
+                        String paramValue = (String) xpath.compile("value/text()").evaluate(paramNode, XPathConstants.STRING);
+                        assertTrue(params.keySet().contains(paramName));
+                        assertEquals(params.get(paramName), paramValue);
                     }
-                    topologyServiceURLs.get(role).add(url);
                 }
+
             }
-            assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+            assertEquals("Unexpected number of service declarations.", (serviceURLs.size() - 1), topologyServiceURLs.size());
 
         } catch (Exception e) {
             e.printStackTrace();
             fail(e.getMessage());
         } finally {
             providerConfig.delete();
+            discoveryConfig.delete();
             if (topologyFile != null) {
                 topologyFile.delete();
             }
@@ -358,7 +413,7 @@ public class SimpleDescriptorHandlerTest {
                     String url = urlNode.getNodeValue();
                     assertNotNull("Every declared service should have a URL.", url);
                     if (!topologyServiceURLs.containsKey(role)) {
-                        topologyServiceURLs.put(role, new ArrayList<String>());
+                        topologyServiceURLs.put(role, new ArrayList<>());
                     }
                     topologyServiceURLs.get(role).add(url);
                 }
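
Note that with the PROPERTIES_FILE discovery type the discovery "address" is simply the path of a properties file, and discovery resolves each service URL from a <clusterName>.<SERVICE_ROLE> key. For the test cluster above, the generated file contains entries such as:

    dummy.name=dummy
    dummy.NAMENODE=hdfs://namenodehost:8020
    dummy.WEBHDFS=http://webhdfshost:1234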


[20/37] knox git commit: KNOX-1069 - KnoxSSO token audience config should trim values

Posted by lm...@apache.org.
KNOX-1069 - KnoxSSO token audience config should trim values


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5de920bd
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5de920bd
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5de920bd

Branch: refs/heads/KNOX-1049
Commit: 5de920bd092d2822a32aa546d01bb8e64de3a5a9
Parents: 90f1df7
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Oct 4 11:00:40 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Oct 4 11:00:40 2017 +0100

----------------------------------------------------------------------
 .../jwt/filter/AbstractJWTFilter.java           |  2 +-
 .../federation/AbstractJWTFilterTest.java       | 31 +++++++++++
 .../gateway/service/knoxsso/WebSSOResource.java |  2 +-
 .../service/knoxsso/WebSSOResourceTest.java     | 58 ++++++++++++++++++++
 .../service/knoxtoken/TokenResource.java        |  2 +-
 .../knoxtoken/TokenServiceResourceTest.java     | 58 ++++++++++++++++++++
 6 files changed, 150 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index d4c6717..7f8e733 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@ -118,7 +118,7 @@ public abstract class AbstractJWTFilter implements Filter {
       String[] audArray = expectedAudiences.split(",");
       audList = new ArrayList<String>();
       for (String a : audArray) {
-        audList.add(a);
+        audList.add(a.trim());
       }
     }
     return audList;
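
The effect of the trim() is easiest to see in isolation. Below is a minimal, self-contained sketch of the parsing behavior this patch targets (illustrative class and method names, not Knox source):

    import java.util.ArrayList;
    import java.util.List;

    public class AudienceParsingSketch {
        static List<String> parseAudiences(String expectedAudiences) {
            List<String> audList = new ArrayList<>();
            if (expectedAudiences != null) {
                for (String a : expectedAudiences.split(",")) {
                    // Without trim(), " foo" and "bar " are stored verbatim and
                    // will never match an "aud" claim of "foo" or "bar".
                    audList.add(a.trim());
                }
            }
            return audList;
        }

        public static void main(String[] args) {
            System.out.println(parseAudiences(" foo, bar ")); // prints [foo, bar]
        }
    }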

http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index bdde3e6..bd34c04 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -203,6 +203,37 @@ public abstract class AbstractJWTFilterTest  {
   }
 
   @Test
+  public void testValidAudienceJWTWhitespace() throws Exception {
+    try {
+      Properties props = getProperties();
+      props.put(getAudienceProperty(), " foo, bar ");
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
+  @Test
   public void testValidVerificationPEM() throws Exception {
     try {
       Properties props = getProperties();

http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
index 0d9e6dd..70228d3 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
@@ -127,7 +127,7 @@ public class WebSSOResource {
     if (audiences != null) {
       String[] auds = audiences.split(",");
       for (int i = 0; i < auds.length; i++) {
-        targetAudiences.add(auds[i]);
+        targetAudiences.add(auds[i].trim());
       }
     }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index 4e9e76b..568f0fe 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -229,6 +229,64 @@ public class WebSSOResourceTest {
     assertTrue(audiences.contains("recipient2"));
   }
 
+  @Test
+  public void testAudiencesWhitespace() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(" recipient1, recipient2 ");
+    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+    EasyMock.replay(principal, services, context, request);
+
+    WebSSOResource webSSOResponse = new WebSSOResource();
+    webSSOResponse.request = request;
+    webSSOResponse.response = responseWrapper;
+    webSSOResponse.context = context;
+    webSSOResponse.init();
+
+    // Issue a token
+    webSSOResponse.doGet();
+
+    // Check the cookie
+    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+    assertNotNull(cookie);
+
+    JWTToken parsedToken = new JWTToken(cookie.getValue());
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+
+    // Verify the audiences
+    List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+    assertEquals(2, audiences.size());
+    assertTrue(audiences.contains("recipient1"));
+    assertTrue(audiences.contains("recipient2"));
+  }
+
   /**
    * A wrapper for HttpServletResponseWrapper to store the cookies
    */

http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index 9d8bae3..8dddf02 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -82,7 +82,7 @@ public class TokenResource {
     if (audiences != null) {
       String[] auds = audiences.split(",");
       for (int i = 0; i < auds.length; i++) {
-        targetAudiences.add(auds[i]);
+        targetAudiences.add(auds[i].trim());
       }
     }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index b4e51e6..0046bd9 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -206,6 +206,64 @@ public class TokenServiceResourceTest {
   }
 
   @Test
+  public void testAudiencesWhitespace() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn(" recipient1, recipient2 ");
+    EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    StringWriter writer = new StringWriter();
+    PrintWriter printWriter = new PrintWriter(writer);
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+    EasyMock.replay(principal, services, context, request, response);
+
+    TokenResource tr = new TokenResource();
+    tr.request = request;
+    tr.response = response;
+    tr.context = context;
+    tr.init();
+
+    // Issue a token
+    Response retResponse = tr.doGet();
+
+    assertEquals(200, retResponse.getStatus());
+
+    // Parse the response
+    String retString = writer.toString();
+    String accessToken = getTagValue(retString, "access_token");
+    assertNotNull(accessToken);
+    String expiry = getTagValue(retString, "expires_in");
+    assertNotNull(expiry);
+
+    // Verify the token
+    JWTToken parsedToken = new JWTToken(accessToken);
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+
+    // Verify the audiences
+    List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+    assertEquals(2, audiences.size());
+    assertTrue(audiences.contains("recipient1"));
+    assertTrue(audiences.contains("recipient2"));
+  }
+
+  @Test
   public void testValidClientCert() throws Exception {
 
     ServletContext context = EasyMock.createNiceMock(ServletContext.class);


[07/37] knox git commit: KNOX-1061 - KnoxSSO Redirects with Query Params in the OriginalUrl Broken

Posted by lm...@apache.org.
KNOX-1061 - KnoxSSO Redirects with Query Params in the OriginalUrl Broken

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/3a0119b2
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/3a0119b2
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/3a0119b2

Branch: refs/heads/KNOX-1049
Commit: 3a0119b217bb71d107b335c27abac77847b2bfe4
Parents: 5432c87
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Sep 22 17:30:13 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Sep 22 17:30:13 2017 -0400

----------------------------------------------------------------------
 ...entityAsserterHttpServletRequestWrapper.java | 25 +++++++++++++-------
 .../apache/hadoop/gateway/util/HttpUtils.java   |  7 +++---
 2 files changed, 20 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/3a0119b2/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
index 961fef7..dfce6cd 100644
--- a/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
+++ b/gateway-provider-identity-assertion-common/src/main/java/org/apache/hadoop/gateway/identityasserter/common/filter/IdentityAsserterHttpServletRequestWrapper.java
@@ -71,21 +71,30 @@ private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.
     return super.getParameter(name);
   }
   
-  @SuppressWarnings("rawtypes")
   @Override
-  public Map getParameterMap() {
-    Map map = null;
+  public Map<String, String[]> getParameterMap() {
+    Map<String, String[]> map = null;
     try {
-      map = getParams();
+      map = convertValuesToStringArrays(getParams());
     } catch (UnsupportedEncodingException e) {
       log.unableToGetParamsFromQueryString(e);
     }
     return map;
   }
 
-  @SuppressWarnings({ "unchecked", "rawtypes" })
+  private Map<String, String[]> convertValuesToStringArrays(Map<String, List<String>> params) {
+    Map<String, String[]> arrayMap = new HashMap<String, String[]>();
+    String name = null;
+    Enumeration<String> names = getParameterNames();
+    while (names.hasMoreElements()) {
+      name = (String) names.nextElement();
+      arrayMap.put(name, getParameterValues(name));
+    }
+    return arrayMap;
+  }
+
   @Override
-  public Enumeration getParameterNames() {
+  public Enumeration<String> getParameterNames() {
     Enumeration<String> e = null;
     Map<String, List<String>> params;
     try {
@@ -103,14 +112,14 @@ private static SpiGatewayMessages log = MessagesFactory.get( SpiGatewayMessages.
 
   @Override
   public String[] getParameterValues(String name) {
-    String[] p = null;
+    String[] p = {};
     Map<String, List<String>> params;
     try {
       params = getParams();
       if (params == null) {
         params = new HashMap<>();
       }
-      p = (String[]) params.get(name).toArray();
+      p = (String[]) params.get(name).toArray(p);
     } catch (UnsupportedEncodingException e) {
       log.unableToGetParamsFromQueryString(e);
     }
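
The switch from toArray() to toArray(p) fixes a latent ClassCastException: the no-argument List.toArray() returns Object[], which cannot be cast to String[]. A standalone illustration (not Knox code):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class ToArraySketch {
        public static void main(String[] args) {
            List<String> values = new ArrayList<>(Arrays.asList("a", "b"));
            // String[] bad = (String[]) values.toArray(); // ClassCastException at runtime
            String[] ok = values.toArray(new String[0]);   // typed copy, safe
            System.out.println(Arrays.toString(ok));       // [a, b]
        }
    }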

http://git-wip-us.apache.org/repos/asf/knox/blob/3a0119b2/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
index 7f5e96a..2b2d0eb 100644
--- a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
+++ b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/HttpUtils.java
@@ -20,10 +20,9 @@ package org.apache.hadoop.gateway.util;
 import java.io.UnsupportedEncodingException;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
@@ -32,7 +31,7 @@ public class HttpUtils {
 
   public static Map<String, List<String>> splitQuery(String queryString)
       throws UnsupportedEncodingException {
-    final Map<String, List<String>> queryPairs = new LinkedHashMap<String, List<String>>();
+    final Map<String, List<String>> queryPairs = new HashMap<String, List<String>>();
     if (queryString == null || queryString.trim().isEmpty()) {
       return queryPairs;
     }
@@ -41,7 +40,7 @@ public class HttpUtils {
       final int idx = pair.indexOf("=");
       final String key = idx > 0 ? URLDecoder.decode(pair.substring(0, idx), "UTF-8") : pair;
       if (!queryPairs.containsKey(key)) {
-        queryPairs.put(key, new LinkedList<String>());
+        queryPairs.put(key, new ArrayList<String>());
       }
       final String value = idx > 0 && pair.length() > idx + 1 
           ? URLDecoder.decode(pair.substring(idx + 1), "UTF-8") : "";
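
A hypothetical call showing the contract of splitQuery after this change: values are URL-decoded, repeated keys accumulate into one list, and (with HashMap replacing LinkedHashMap) iteration order is no longer insertion order:

    Map<String, List<String>> params =
        HttpUtils.splitQuery("originalUrl=http%3A%2F%2Fhost%2Fapp%3Fa%3D1&x=1&x=2");
    // params.get("originalUrl") -> ["http://host/app?a=1"]
    // params.get("x")           -> ["1", "2"]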


[12/37] knox git commit: KNOX-1065 - Add some unit tests for the DefaultTokenAuthorityService

Posted by lm...@apache.org.
KNOX-1065 - Add some unit tests for the DefaultTokenAuthorityService


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/78ef4e50
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/78ef4e50
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/78ef4e50

Branch: refs/heads/KNOX-1049
Commit: 78ef4e50ba6f7522aa363d5d1df8f8d04320003e
Parents: c2ca443
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Mon Sep 25 11:58:24 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 25 11:58:24 2017 +0100

----------------------------------------------------------------------
 .../impl/DefaultTokenAuthorityServiceTest.java  | 160 +++++++++++++++++++
 .../resources/keystores/server-keystore.jks     | Bin 0 -> 1387 bytes
 2 files changed, 160 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/78ef4e50/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
new file mode 100644
index 0000000..7cc9971
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@ -0,0 +1,160 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.services.token.impl;
+
+import java.io.File;
+import java.security.Principal;
+import java.util.HashMap;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.KeystoreService;
+import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.gateway.services.security.impl.DefaultKeystoreService;
+import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+/**
+ * Some unit tests for the DefaultTokenAuthorityService.
+ */
+public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
+
+  @Test
+  public void testTokenCreation() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+  }
+
+  @Test
+  public void testTokenCreationAudience() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, "https://login.example.com", "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+  }
+
+  @Test
+  public void testTokenCreationNullAudience() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, null, "RS256");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+  }
+
+}
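
The binary keystore added below matches the configuration mocked in these tests (security dir under target/test-classes, keystore name server-keystore.jks, signing alias "server", master secret "horton"). A keystore with equivalent properties could be produced with keytool, assuming the key and store passwords both equal the master secret (the -dname value here is illustrative):

    keytool -genkeypair -alias server -keyalg RSA -keysize 2048 \
        -dname "CN=localhost" -keystore server-keystore.jks \
        -storepass horton -keypass horton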

http://git-wip-us.apache.org/repos/asf/knox/blob/78ef4e50/gateway-server/src/test/resources/keystores/server-keystore.jks
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/keystores/server-keystore.jks b/gateway-server/src/test/resources/keystores/server-keystore.jks
new file mode 100644
index 0000000..570c92c
Binary files /dev/null and b/gateway-server/src/test/resources/keystores/server-keystore.jks differ


[21/37] knox git commit: KNOX-1048 - Knoxshell samples missing hadoop-examples.jar

Posted by lm...@apache.org.
KNOX-1048 - Knoxshell samples missing hadoop-examples.jar


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/ff3af36d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/ff3af36d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/ff3af36d

Branch: refs/heads/KNOX-1049
Commit: ff3af36d63609cd0b60400071e9df26ec41e96d3
Parents: 5de920b
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Oct 4 15:47:01 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Oct 4 15:47:01 2017 +0100

----------------------------------------------------------------------
 gateway-shell-release/pom.xml | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/ff3af36d/gateway-shell-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-shell-release/pom.xml b/gateway-shell-release/pom.xml
index 3365641..1e6c5f8 100644
--- a/gateway-shell-release/pom.xml
+++ b/gateway-shell-release/pom.xml
@@ -170,6 +170,10 @@
             <artifactId>gateway-shell-samples</artifactId>
         </dependency>
         <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>hadoop-examples</artifactId>
+        </dependency>
+        <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <scope>test</scope>


[03/37] knox git commit: KNOX-1036 - Fix a number of issues relating to JWTokenAuthority

Posted by lm...@apache.org.
KNOX-1036 - Fix a number of issues relating to JWTokenAuthority

Signed-off-by: Colm O hEigeartaigh <co...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c833bf90
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c833bf90
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c833bf90

Branch: refs/heads/KNOX-1049
Commit: c833bf907566301e525f514354dcb0325f5e0738
Parents: d3f507f
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Sep 20 11:26:33 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Sep 21 15:42:41 2017 +0100

----------------------------------------------------------------------
 .../filter/JWTAccessTokenAssertionFilter.java   | 23 ++++++------
 .../jwt/filter/JWTAuthCodeAssertionFilter.java  | 16 ++++----
 .../federation/AbstractJWTFilterTest.java       | 19 +++++-----
 .../impl/DefaultTokenAuthorityService.java      | 21 ++++++-----
 .../service/knoxsso/WebSSOResourceTest.java     | 14 +++----
 .../knoxtoken/TokenServiceResourceTest.java     | 14 +++----
 .../security/token/JWTokenAuthority.java        | 19 +++++-----
 .../services/security/token/impl/JWT.java       | 39 +++++++++++---------
 .../services/security/token/impl/JWTToken.java  | 27 +++++++-------
 9 files changed, 97 insertions(+), 95 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
index f8d9a02..e2ef32e 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAccessTokenAssertionFilter.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.apache.hadoop.gateway.util.JsonUtils;
 
@@ -66,12 +67,12 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
     authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
     sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
   }
-  
+
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain chain) throws IOException, ServletException {
     String jsonResponse = null;
-    
+
     String header = ((HttpServletRequest) request).getHeader("Authorization");
     if (header != null && header.startsWith(BEARER)) {
       // what follows the bearer designator should be the JWT token being used to request or as an access token
@@ -94,7 +95,7 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
       else {
         throw new ServletException("Expected JWT Token not provided as Bearer token");
       }
-      
+
       // authorization of the user for the requested service (and resource?) should have been done by
       // the JWTFederationFilter - once we get here we can assume that it is authorized and we just need
       // to assert the identity via an access token
@@ -102,27 +103,27 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
       Subject subject = Subject.getSubject(AccessController.getContext());
       String principalName = getPrincipalName(subject);
       principalName = mapper.mapUserPrincipal(principalName);
-      
+
       // calculate expiration timestamp: validity * 1000 + currentTimeInMillis
       long expires = System.currentTimeMillis() + validity * 1000;
-      
+
       String serviceName = request.getParameter("service-name");
       String clusterName = request.getParameter("cluster-name");
       String accessToken = getAccessToken(principalName, serviceName, expires);
-      
+
       String serviceURL = sr.lookupServiceURL(clusterName, serviceName);
-      
+
       HashMap<String, Object> map = new HashMap<>();
       // TODO: populate map from JWT authorization code
       map.put(ACCESS_TOKEN, accessToken);
       map.put(TOKEN_TYPE, BEARER);
       map.put(EXPIRES_IN, expires);
-      
+
       // TODO: this url needs to be rewritten when in gateway deployments....
       map.put(SVC_URL, serviceURL);
-      
+
       jsonResponse = JsonUtils.renderAsJsonString(map);
-      
+
       response.getWriter().write(jsonResponse);
       //KNOX-685: response.getWriter().flush();
       return; // break filter chain
@@ -147,7 +148,7 @@ public class JWTAccessTokenAssertionFilter extends AbstractIdentityAssertionFilt
         return principalName;
       }
     };
-    JWTToken token = null;
+    JWT token = null;
     try {
       token = authority.issueToken(p, serviceName, "RS256", expires);
       // Coverity CID 1327961
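
The recurring type change in this commit, JWTToken to JWT, has callers program against the JWT interface now exposed by the reworked JWTokenAuthority instead of the concrete JWTToken implementation, keeping filters decoupled from the token class, so issuing code reduces to:

    JWT token = authority.issueToken(p, serviceName, "RS256", expires);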

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
index 07cdf62..74b154f 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTAuthCodeAssertionFilter.java
@@ -33,12 +33,12 @@ import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.util.JsonUtils;
 
 public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter {
   private static final String BEARER = "Bearer ";
-  
+
   private JWTokenAuthority authority = null;
 
   private ServiceRegistry sr;
@@ -56,7 +56,7 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
     authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
     sr = (ServiceRegistry) services.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
   }
-  
+
   @Override
   public void doFilter(ServletRequest request, ServletResponse response,
       FilterChain chain) throws IOException, ServletException {
@@ -64,15 +64,15 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
       Subject subject = Subject.getSubject(AccessController.getContext());
       String principalName = getPrincipalName(subject);
       principalName = mapper.mapUserPrincipal(principalName);
-      JWTToken authCode;
+      JWT authCode;
       try {
         authCode = authority.issueToken(subject, "RS256");
         // get the url for the token service
-        String url = null; 
+        String url = null;
         if (sr != null) {
           url = sr.lookupServiceURL("token", "TGS");
         }
-        
+
         HashMap<String, Object> map = new HashMap<>();
         // TODO: populate map from JWT authorization code
         // Coverity CID 1327960
@@ -86,9 +86,9 @@ public class JWTAuthCodeAssertionFilter extends AbstractIdentityAssertionFilter
         if (url != null) {
           map.put("tke", url);
         }
-        
+
         String jsonResponse = JsonUtils.renderAsJsonString(map);
-        
+
         response.getWriter().write(jsonResponse);
         //KNOX-685: response.getWriter().flush();
       } catch (TokenServiceException e) {

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index d477f1f..bdde3e6 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.gateway.services.security.impl.X509CertificateUtil;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
 import org.apache.hadoop.gateway.services.security.token.impl.JWT;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.easymock.EasyMock;
 import org.junit.After;
 import org.junit.Assert;
@@ -550,7 +549,7 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
@@ -560,7 +559,7 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
@@ -570,16 +569,16 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
      */
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
         throws TokenServiceException {
       return null;
     }
 
     /* (non-Javadoc)
-     * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.hadoop.gateway.services.security.token.impl.JWTToken)
+     * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.hadoop.gateway.services.security.token.impl.JWT)
      */
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) verifyingKey);
       return token.verify(verifier);
     }
@@ -588,13 +587,13 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String, long)
      */
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
         long expires) throws TokenServiceException {
       return null;
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
         long expires) throws TokenServiceException {
       return null;
     }
@@ -603,14 +602,14 @@ public abstract class AbstractJWTFilterTest  {
      * @see org.apache.hadoop.gateway.services.security.token.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long)
      */
     @Override
-    public JWT issueToken(Principal p, String audience, long l)
+    public JWT issueToken(Principal p, String algorithm, long expires)
         throws TokenServiceException {
       // TODO Auto-generated method stub
       return null;
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
index fc0a266..33b86bd 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.gateway.services.security.KeystoreService;
 import org.apache.hadoop.gateway.services.security.KeystoreServiceException;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 
 import com.nimbusds.jose.JWSSigner;
@@ -63,28 +64,28 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Subject subject, String algorithm) throws TokenServiceException {
+  public JWT issueToken(Subject subject, String algorithm) throws TokenServiceException {
     Principal p = (Principal) subject.getPrincipals().toArray()[0];
     return issueToken(p, algorithm);
   }
-  
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Principal p, String algorithm) throws TokenServiceException {
+  public JWT issueToken(Principal p, String algorithm) throws TokenServiceException {
     return issueToken(p, null, algorithm);
   }
-  
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long expires)
    */
   @Override
-  public JWTToken issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
+  public JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException {
     return issueToken(p, (String)null, algorithm, expires);
   }
 
-  public JWTToken issueToken(Principal p, String audience, String algorithm)
+  public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
     return issueToken(p, audience, algorithm, -1);
   }
@@ -93,7 +94,7 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
    * @see org.apache.hadoop.gateway.provider.federation.jwt.JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
    */
   @Override
-  public JWTToken issueToken(Principal p, String audience, String algorithm, long expires)
+  public JWT issueToken(Principal p, String audience, String algorithm, long expires)
       throws TokenServiceException {
     ArrayList<String> audiences = null;
     if (audience != null) {
@@ -104,7 +105,7 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
   }
 
   @Override
-  public JWTToken issueToken(Principal p, List<String> audiences, String algorithm, long expires)
+  public JWT issueToken(Principal p, List<String> audiences, String algorithm, long expires)
       throws TokenServiceException {
     String[] claimArray = new String[4];
     claimArray[0] = "KNOXSSO";
@@ -159,13 +160,13 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
   }
 
   @Override
-  public boolean verifyToken(JWTToken token)
+  public boolean verifyToken(JWT token)
       throws TokenServiceException {
     return verifyToken(token, null);
   }
 
   @Override
-  public boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
+  public boolean verifyToken(JWT token, RSAPublicKey publicKey)
       throws TokenServiceException {
     boolean rc = false;
     PublicKey key;
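
Both verifyToken variants above reduce to the same pattern: wrap an RSA public key in a Nimbus RSASSAVerifier and let the token check its own signature. A minimal sketch of that pattern in isolation, assuming the key has already been obtained (from the gateway identity certificate or from the caller):

  import java.security.interfaces.RSAPublicKey;

  import com.nimbusds.jose.JWSVerifier;
  import com.nimbusds.jose.crypto.RSASSAVerifier;

  import org.apache.hadoop.gateway.services.security.token.impl.JWT;

  public class VerifySketch {
    // Mirrors verifyToken(JWT, RSAPublicKey) above: wrap the key in a
    // verifier and delegate to the token's verify method.
    static boolean verifyWithKey(JWT token, RSAPublicKey publicKey) {
      JWSVerifier verifier = new RSASSAVerifier(publicKey);
      return token.verify(verifier);
    }
  }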

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index c953c91..4e9e76b 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -274,32 +274,32 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException {
       Principal p = (Principal) subject.getPrincipals().toArray()[0];
       return issueToken(p, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException {
       return issueToken(p, null, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
       return issueToken(p, audience, algorithm, -1);
     }
 
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
                                long expires) throws TokenServiceException {
       List<String> audiences = null;
       if (audience != null) {
@@ -310,7 +310,7 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
                                long expires) throws TokenServiceException {
       String[] claimArray = new String[4];
       claimArray[0] = "KNOXSSO";
@@ -341,7 +341,7 @@ public class WebSSOResourceTest {
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index 9faa073..bddd13d 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -229,32 +229,32 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Subject subject, String algorithm)
+    public JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException {
       Principal p = (Principal) subject.getPrincipals().toArray()[0];
       return issueToken(p, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String algorithm)
+    public JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException {
       return issueToken(p, null, algorithm);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm)
+    public JWT issueToken(Principal p, String audience, String algorithm)
       throws TokenServiceException {
       return issueToken(p, audience, algorithm, -1);
     }
 
     @Override
-    public boolean verifyToken(JWTToken token) throws TokenServiceException {
+    public boolean verifyToken(JWT token) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }
 
     @Override
-    public JWTToken issueToken(Principal p, String audience, String algorithm,
+    public JWT issueToken(Principal p, String audience, String algorithm,
                                long expires) throws TokenServiceException {
       ArrayList<String> audiences = null;
       if (audience != null) {
@@ -265,7 +265,7 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public JWTToken issueToken(Principal p, List<String> audiences, String algorithm,
+    public JWT issueToken(Principal p, List<String> audiences, String algorithm,
                                long expires) throws TokenServiceException {
       String[] claimArray = new String[4];
       claimArray[0] = "KNOXSSO";
@@ -296,7 +296,7 @@ public class TokenServiceResourceTest {
     }
 
     @Override
-    public boolean verifyToken(JWTToken token, RSAPublicKey publicKey) throws TokenServiceException {
+    public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
       JWSVerifier verifier = new RSASSAVerifier(publicKey);
       return token.verify(verifier);
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
index 9cb82ec..155b239 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/JWTokenAuthority.java
@@ -24,29 +24,28 @@ import java.util.List;
 import javax.security.auth.Subject;
 
 import org.apache.hadoop.gateway.services.security.token.impl.JWT;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 
 public interface JWTokenAuthority {
 
-  JWTToken issueToken(Subject subject, String algorithm)
+  JWT issueToken(Subject subject, String algorithm)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String algorithm)
+  JWT issueToken(Principal p, String algorithm)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String audience,
+  JWT issueToken(Principal p, String audience,
       String algorithm) throws TokenServiceException;
 
-  boolean verifyToken(JWTToken token) throws TokenServiceException;
+  boolean verifyToken(JWT token) throws TokenServiceException;
 
-  boolean verifyToken(JWTToken token, RSAPublicKey publicKey)
+  boolean verifyToken(JWT token, RSAPublicKey publicKey)
       throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, String audience, String algorithm,
-      long expires) throws TokenServiceException;
+  JWT issueToken(Principal p, String algorithm, long expires) throws TokenServiceException;
 
-  JWT issueToken(Principal p, String audience, long l) throws TokenServiceException;
+  JWT issueToken(Principal p, String audience, String algorithm,
+      long expires) throws TokenServiceException;
 
-  JWTToken issueToken(Principal p, List<String> audience, String algorithm,
+  JWT issueToken(Principal p, List<String> audience, String algorithm,
       long expires) throws TokenServiceException;
 }
\ No newline at end of file
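
Taken as a whole, the interface now traffics only in the JWT abstraction, so callers never touch the JWTToken implementation class. A hedged usage sketch, assuming an authority instance obtained from GatewayServices the way the filters above obtain it:

  import java.security.Principal;

  import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
  import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
  import org.apache.hadoop.gateway.services.security.token.impl.JWT;

  public class IssueAndVerifySketch {
    // Issues a short-lived RS256 token for an audience and immediately
    // verifies it with the gateway's own key, all through the JWT interface.
    static boolean issueAndCheck(JWTokenAuthority authority, Principal p)
        throws TokenServiceException {
      long expires = System.currentTimeMillis() + 30000L;
      JWT token = authority.issueToken(p, "https://login.example.com", "RS256", expires);
      return authority.verifyToken(token);
    }
  }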

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
index b834649..1a6f4f9 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
@@ -20,41 +20,44 @@ package org.apache.hadoop.gateway.services.security.token.impl;
 import java.util.Date;
 
 import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
 
 public interface JWT {
 
-  public static final String PRINCIPAL = "prn";
-  public static final String SUBJECT = "sub";
-  public static final String ISSUER = "iss";
-  public static final String AUDIENCE = "aud";
-  public static final String EXPIRES = "exp";
+  String PRINCIPAL = "prn";
+  String SUBJECT = "sub";
+  String ISSUER = "iss";
+  String AUDIENCE = "aud";
+  String EXPIRES = "exp";
 
-  public abstract String getPayload();
+  String getPayload();
 
-  public abstract void setSignaturePayload(byte[] payload);
+  void setSignaturePayload(byte[] payload);
 
-  public abstract byte[] getSignaturePayload();
+  byte[] getSignaturePayload();
 
-  public abstract String getClaim(String claimName);
+  String getClaim(String claimName);
 
-  public abstract String getPrincipal();
+  String getPrincipal();
 
-  public abstract String getIssuer();
+  String getIssuer();
 
-  public abstract String getAudience();
+  String getAudience();
 
   public String[] getAudienceClaims();
 
-  public abstract String getExpires();
+  String getExpires();
 
-  public abstract Date getExpiresDate();
+  Date getExpiresDate();
 
-  public abstract String getSubject();
+  String getSubject();
 
-  public abstract String getHeader();
+  String getHeader();
 
-  public abstract String getClaims();
+  String getClaims();
 
-  public abstract void sign(JWSSigner signer);
+  void sign(JWSSigner signer);
+
+  boolean verify(JWSVerifier verifier);
 
 }
\ No newline at end of file
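
One note on the modifier cleanup above: it is purely cosmetic, because interface fields are implicitly public static final and interface methods implicitly public abstract in Java; that is also why the untouched getAudienceClaims() declaration keeps compiling identically. The pairs below are equivalent:

  public interface ModifierExample {
    String PRINCIPAL = "prn";                      // implicitly public static final
    public static final String PRINCIPAL2 = "prn"; // same meaning, just noisier

    String getPayload();                           // implicitly public abstract
    public abstract String getPayload2();          // same meaning, just noisier
  }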

http://git-wip-us.apache.org/repos/asf/knox/blob/c833bf90/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index cc2ccfe..49d8609 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -39,10 +39,10 @@ public class JWTToken implements JWT {
   private static JWTProviderMessages log = MessagesFactory.get( JWTProviderMessages.class );
 
   SignedJWT jwt = null;
-  
+
   private JWTToken(byte[] header, byte[] claims, byte[] signature) throws ParseException {
     try {
-      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")), 
+      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")),
           new Base64URL(new String(signature, "UTF8")));
     } catch (UnsupportedEncodingException e) {
       log.unsupportedEncoding(e);
@@ -79,7 +79,7 @@ public class JWTToken implements JWT {
     if(claimsArray[3] != null) {
       builder = builder.expirationTime(new Date(Long.parseLong(claimsArray[3])));
     }
-    
+
     claims = builder.build();
 
     jwt = new SignedJWT(header, claims);
@@ -151,7 +151,7 @@ public class JWTToken implements JWT {
 //    System.out.println("header: " + token.header);
 //    System.out.println("claims: " + token.claims);
 //    System.out.println("payload: " + new String(token.payload));
-    
+
     return jwt;
   }
 
@@ -161,13 +161,13 @@ public class JWTToken implements JWT {
   @Override
   public String getClaim(String claimName) {
     String claim = null;
-    
+
     try {
       claim = jwt.getJWTClaimsSet().getStringClaim(claimName);
     } catch (ParseException e) {
       log.unableToParseToken(e);
     }
-    
+
     return claim;
   }
 
@@ -246,9 +246,9 @@ public class JWTToken implements JWT {
     return getClaim(JWT.PRINCIPAL);
   }
 
-  
+
   /* (non-Javadoc)
-   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#getPrincipal()
+   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#sign(JWSSigner)
    */
   @Override
   public void sign(JWSSigner signer) {
@@ -259,20 +259,19 @@ public class JWTToken implements JWT {
     }
   }
 
-  /**
-   * @param verifier
-   * @return
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#verify(JWSVerifier)
    */
   public boolean verify(JWSVerifier verifier) {
     boolean rc = false;
-    
+
     try {
       rc = jwt.verify(verifier);
     } catch (JOSEException e) {
       // TODO Auto-generated catch block
       log.unableToVerifyToken(e);
     }
-    
+
     return rc;
-  }  
+  }
 }


[19/37] knox git commit: KNOX-1055 - Spark page characters &amp; should be treated as a delimiter (Mars via Sandeep More)

Posted by lm...@apache.org.
KNOX-1055 - Spark page characters &amp; should be treated as a delimiter (Mars via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/90f1df7f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/90f1df7f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/90f1df7f

Branch: refs/heads/KNOX-1049
Commit: 90f1df7f5b4ed33a9462a46f2fc5afff032890a2
Parents: c5aedf4
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 2 11:37:24 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 2 11:37:24 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/util/urltemplate/Parser.java    | 10 +++++++---
 .../gateway/util/urltemplate/ParserTest.java       | 17 +++++++++++++++++
 2 files changed, 24 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/90f1df7f/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java b/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
index e59cd32..a752b22 100644
--- a/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
+++ b/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
@@ -238,10 +238,14 @@ public class Parser {
 
   private static void consumeQueryToken( final Builder builder, String token ) {
     if( token != null ) {
-      StringTokenizer tokenizer = new StringTokenizer( token, "?&" );
-      while( tokenizer.hasMoreTokens() ) {
-        consumeQuerySegment( builder, tokenizer.nextToken() );
+      //add "&amp;" as a delimiter
+      String[] tokens = token.split("(&amp;|\\?|&)");
+      if (tokens != null){
+        for (String nextToken : tokens){
+          consumeQuerySegment(builder,nextToken);
+        }
       }
+
     }
   }
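
The split pattern above treats the literal entity "&amp;" as a delimiter alongside "?" and "&". Alternation order matters here: "&amp;" has to be tried before the bare "&", or the entity would be split mid-sequence and leave "amp;" glued to the next parameter. A quick standalone check of the behavior:

  public class SplitCheck {
    public static void main(String[] args) {
      String token = "stage?id=007&amp;attempt=0&debug=true";
      // Same pattern as consumeQueryToken above.
      for (String segment : token.split("(&amp;|\\?|&)")) {
        System.out.println("[" + segment + "]");
      }
      // prints [stage] [id=007] [attempt=0] [debug=true], one per line
    }
  }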
 

http://git-wip-us.apache.org/repos/asf/knox/blob/90f1df7f/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java b/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
index f6d1ab2..4305e11 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
@@ -343,6 +343,23 @@ public class ParserTest {
     assertThat( template.toString(), is( text ) );
   }
 
+  /**
+   *  KNOX-1055
+   *  In some cases & could be encoded as &amp;
+   */
+  @Test
+  public void testEncodedChar() throws URISyntaxException {
+    Parser parser = new Parser();
+    String text;
+    Template template;
+
+    text = "stage?id=007&amp;attempt=0";
+    template = parser.parseTemplate( text );
+    assertBasics( template, false, false, true, 1, 2 );
+    assertQuery( template, "id", "", "007" );
+    assertQuery( template, "attempt", "", "0" );
+  }
+
   @Test
   public void testParameterizedPathTemplatesWithWildcardAndRegex() throws URISyntaxException {
     String text;


[30/37] knox git commit: KNOX-1067 - Support different signature algorithms for JWTs

Posted by lm...@apache.org.
KNOX-1067 - Support different signature algorithms for JWTs


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/9c7aa7e1
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/9c7aa7e1
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/9c7aa7e1

Branch: refs/heads/KNOX-1049
Commit: 9c7aa7e1c7471f71c783681b68beea8e6f3fc2dc
Parents: 6acfa43
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Mon Oct 16 12:26:28 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Oct 16 15:41:08 2017 +0100

----------------------------------------------------------------------
 .../jwt/filter/AbstractJWTFilter.java           |  45 ++++++--
 .../jwt/filter/JWTFederationFilter.java         |   5 +-
 .../jwt/filter/SSOCookieFederationFilter.java   |   5 +-
 .../federation/AbstractJWTFilterTest.java       | 102 ++++++++++++++++---
 .../federation/SSOCookieProviderTest.java       |   5 +-
 .../impl/DefaultTokenAuthorityService.java      |  22 +++-
 .../impl/DefaultTokenAuthorityServiceTest.java  |  93 +++++++++++++++++
 .../gateway/service/knoxsso/WebSSOResource.java |  11 +-
 .../service/knoxsso/WebSSOResourceTest.java     |  69 +++++++++++--
 .../service/knoxtoken/TokenResource.java        |  11 +-
 .../knoxtoken/TokenServiceResourceTest.java     |  76 +++++++++++---
 .../services/security/token/impl/JWTToken.java  |   3 -
 .../security/token/impl/JWTTokenTest.java       |  45 +++++---
 13 files changed, 412 insertions(+), 80 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index 7f8e733..deb3d5b 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@ -22,6 +22,7 @@ import java.security.Principal;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 import java.security.interfaces.RSAPublicKey;
+import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashSet;
@@ -54,7 +55,9 @@ import org.apache.hadoop.gateway.security.PrimaryPrincipal;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
+
+import com.nimbusds.jose.JWSHeader;
 
 /**
  *
@@ -67,6 +70,13 @@ public abstract class AbstractJWTFilter implements Filter {
   public static final String JWT_EXPECTED_ISSUER = "jwt.expected.issuer";
   public static final String JWT_DEFAULT_ISSUER = "KNOXSSO";
 
+  /**
+   * If specified, this configuration property refers to the signature algorithm which a received
+   * token must match. Otherwise, the default value "RS256" is used
+   */
+  public static final String JWT_EXPECTED_SIGALG = "jwt.expected.sigalg";
+  public static final String JWT_DEFAULT_SIGALG = "RS256";
+
   static JWTMessages log = MessagesFactory.get( JWTMessages.class );
   private static AuditService auditService = AuditServiceFactory.getAuditService();
   private static Auditor auditor = auditService.getAuditor(
@@ -77,6 +87,7 @@ public abstract class AbstractJWTFilter implements Filter {
   protected JWTokenAuthority authority;
   protected RSAPublicKey publicKey = null;
   private String expectedIssuer;
+  private String expectedSigAlg;
 
   public abstract void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
       throws IOException, ServletException;
@@ -99,11 +110,16 @@ public abstract class AbstractJWTFilter implements Filter {
     }
   }
 
-  protected void configureExpectedIssuer(FilterConfig filterConfig) {
-    expectedIssuer = filterConfig.getInitParameter(JWT_EXPECTED_ISSUER);;
+  protected void configureExpectedParameters(FilterConfig filterConfig) {
+    expectedIssuer = filterConfig.getInitParameter(JWT_EXPECTED_ISSUER);
     if (expectedIssuer == null) {
       expectedIssuer = JWT_DEFAULT_ISSUER;
     }
+
+    expectedSigAlg = filterConfig.getInitParameter(JWT_EXPECTED_SIGALG);
+    if (expectedSigAlg == null) {
+      expectedSigAlg = JWT_DEFAULT_SIGALG;
+    }
   }
 
   /**
@@ -111,7 +127,7 @@ public abstract class AbstractJWTFilter implements Filter {
    * @return
    */
   protected List<String> parseExpectedAudiences(String expectedAudiences) {
-    ArrayList<String> audList = null;
+    List<String> audList = null;
     // setup the list of valid audiences for token validation
     if (expectedAudiences != null) {
       // parse into the list
@@ -124,7 +140,7 @@ public abstract class AbstractJWTFilter implements Filter {
     return audList;
   }
 
-  protected boolean tokenIsStillValid(JWTToken jwtToken) {
+  protected boolean tokenIsStillValid(JWT jwtToken) {
     // if there is no expiration date then the lifecycle is tied entirely to
     // the cookie validity - otherwise ensure that the current time is before
     // the designated expiration time
@@ -141,7 +157,7 @@ public abstract class AbstractJWTFilter implements Filter {
    *          the JWT token where the allowed audiences will be found
    * @return true if an expected audience is present, otherwise false
    */
-  protected boolean validateAudiences(JWTToken jwtToken) {
+  protected boolean validateAudiences(JWT jwtToken) {
     boolean valid = false;
 
     String[] tokenAudienceList = jwtToken.getAudienceClaims();
@@ -202,7 +218,7 @@ public abstract class AbstractJWTFilter implements Filter {
     }
   }
 
-  protected Subject createSubjectFromToken(JWTToken token) {
+  protected Subject createSubjectFromToken(JWT token) {
     final String principal = token.getSubject();
 
     @SuppressWarnings("rawtypes")
@@ -223,7 +239,7 @@ public abstract class AbstractJWTFilter implements Filter {
   }
 
   protected boolean validateToken(HttpServletRequest request, HttpServletResponse response,
-      FilterChain chain, JWTToken token)
+      FilterChain chain, JWT token)
       throws IOException, ServletException {
     boolean verified = false;
     try {
@@ -237,6 +253,19 @@ public abstract class AbstractJWTFilter implements Filter {
       log.unableToVerifyToken(e);
     }
 
+    // Check received signature algorithm
+    if (verified) {
+      try {
+        String receivedSigAlg = JWSHeader.parse(token.getHeader()).getAlgorithm().getName();
+        if (!receivedSigAlg.equals(expectedSigAlg)) {
+          verified = false;
+        }
+      } catch (ParseException e) {
+        log.unableToVerifyToken(e);
+        verified = false;
+      }
+    }
+
     if (verified) {
       // confirm that issue matches intended target
       if (expectedIssuer.equals(token.getIssuer())) {
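
The check added above parses the JWS header of an already signature-verified token and compares its alg value against the configured expectation (jwt.expected.sigalg, defaulting to RS256). A minimal sketch of just that comparison, using the same Nimbus JWSHeader.parse call; as in the filter, an unparseable header counts as a failure:

  import java.text.ParseException;

  import com.nimbusds.jose.JWSHeader;

  public class SigAlgCheck {
    // serializedHeader is what JWT.getHeader() returns for a received token.
    static boolean matchesExpectedAlg(String serializedHeader, String expectedSigAlg) {
      try {
        String receivedSigAlg = JWSHeader.parse(serializedHeader).getAlgorithm().getName();
        return receivedSigAlg.equals(expectedSigAlg);
      } catch (ParseException e) {
        return false; // treat as verification failure
      }
    }
  }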

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTFederationFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTFederationFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTFederationFilter.java
index 401e449..dcc52c0 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTFederationFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/JWTFederationFilter.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.gateway.provider.federation.jwt.filter;
 
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.apache.hadoop.gateway.util.CertificateUtils;
 
@@ -63,7 +64,7 @@ public class JWTFederationFilter extends AbstractJWTFilter {
       publicKey = CertificateUtils.parseRSAPublicKey(verificationPEM);
     }
 
-    configureExpectedIssuer(filterConfig);
+    configureExpectedParameters(filterConfig);
   }
 
   public void destroy() {
@@ -84,7 +85,7 @@ public class JWTFederationFilter extends AbstractJWTFilter {
 
     if (wireToken != null) {
       try {
-        JWTToken token = new JWTToken(wireToken);
+        JWT token = new JWTToken(wireToken);
         if (validateToken((HttpServletRequest)request, (HttpServletResponse)response, chain, token)) {
           Subject subject = createSubjectFromToken(token);
           continueWithEstablishedSecurityContext(subject, (HttpServletRequest)request, (HttpServletResponse)response, chain);

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/SSOCookieFederationFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/SSOCookieFederationFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/SSOCookieFederationFilter.java
index cf14863..7e1c64a 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/SSOCookieFederationFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/SSOCookieFederationFilter.java
@@ -33,6 +33,7 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.provider.federation.jwt.JWTMessages;
 import org.apache.hadoop.gateway.security.PrimaryPrincipal;
+import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
 import org.apache.hadoop.gateway.util.CertificateUtils;
 
@@ -78,7 +79,7 @@ public class SSOCookieFederationFilter extends AbstractJWTFilter {
       publicKey = CertificateUtils.parseRSAPublicKey(verificationPEM);
     }
 
-    configureExpectedIssuer(filterConfig);
+    configureExpectedParameters(filterConfig);
   }
 
   public void destroy() {
@@ -105,7 +106,7 @@ public class SSOCookieFederationFilter extends AbstractJWTFilter {
     }
     else {
       try {
-        JWTToken token = new JWTToken(wireToken);
+        JWT token = new JWTToken(wireToken);
         if (validateToken((HttpServletRequest)request, (HttpServletResponse)response, chain, token)) {
           Subject subject = createSubjectFromToken(token);
           continueWithEstablishedSecurityContext(subject, (HttpServletRequest)request, (HttpServletResponse)response, chain);

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index bd34c04..b261081 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -116,7 +116,8 @@ public abstract class AbstractJWTFilterTest  {
       Properties props = getProperties();
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -147,7 +148,8 @@ public abstract class AbstractJWTFilterTest  {
       props.put(getAudienceProperty(), "bar");
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -180,7 +182,8 @@ public abstract class AbstractJWTFilterTest  {
 
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -209,7 +212,8 @@ public abstract class AbstractJWTFilterTest  {
       props.put(getAudienceProperty(), " foo, bar ");
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -245,7 +249,8 @@ public abstract class AbstractJWTFilterTest  {
       props.put(getVerificationPemProperty(), pem);
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 50000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -275,7 +280,8 @@ public abstract class AbstractJWTFilterTest  {
       Properties props = getProperties();
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() - 1000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() - 1000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -303,7 +309,7 @@ public abstract class AbstractJWTFilterTest  {
       Properties props = getProperties();
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", null, privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", null, privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -333,7 +339,8 @@ public abstract class AbstractJWTFilterTest  {
       Properties props = getProperties();
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "bob",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setGarbledTokenOnRequest(request, jwt);
@@ -367,8 +374,8 @@ public abstract class AbstractJWTFilterTest  {
       Properties props = getProperties();
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000),
-                             (RSAPrivateKey)kp.getPrivate(), props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "bob",
+                             new Date(new Date().getTime() + 5000), (RSAPrivateKey)kp.getPrivate());
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -409,7 +416,8 @@ public abstract class AbstractJWTFilterTest  {
       props.put(getVerificationPemProperty(), failingPem);
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 50000), privateKey);
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -489,6 +497,67 @@ public abstract class AbstractJWTFilterTest  {
     }
   }
 
+  @Test
+  public void testRS512SignatureAlgorithm() throws Exception {
+    try {
+      Properties props = getProperties();
+      props.put(AbstractJWTFilter.JWT_EXPECTED_SIGALG, "RS512");
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", new Date(new Date().getTime() + 5000),
+                             privateKey, JWSAlgorithm.RS512.getName());
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
+  @Test
+  public void testInvalidSignatureAlgorithm() throws Exception {
+    try {
+      Properties props = getProperties();
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", new Date(new Date().getTime() + 5000),
+                             privateKey, JWSAlgorithm.RS384.getName());
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", !chain.doFilterCalled );
+      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
   protected Properties getProperties() {
     Properties props = new Properties();
     props.setProperty(
@@ -497,12 +566,13 @@ public abstract class AbstractJWTFilterTest  {
     return props;
   }
 
-  protected SignedJWT getJWT(String sub, Date expires, RSAPrivateKey privateKey,
-      Properties props) throws Exception {
-    return getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, sub, expires, privateKey);
+  protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey)
+      throws Exception {
+    return getJWT(issuer, sub, expires, privateKey, JWSAlgorithm.RS256.getName());
   }
 
-  protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey)
+  protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey,
+                             String signatureAlgorithm)
       throws Exception {
     List<String> aud = new ArrayList<String>();
     aud.add("bar");
@@ -515,7 +585,7 @@ public abstract class AbstractJWTFilterTest  {
     .claim("scope", "openid")
     .build();
 
-    JWSHeader header = new JWSHeader.Builder(JWSAlgorithm.RS256).build();
+    JWSHeader header = new JWSHeader.Builder(JWSAlgorithm.parse(signatureAlgorithm)).build();
 
     SignedJWT signedJWT = new SignedJWT(header, claims);
     JWSSigner signer = new RSASSASigner(privateKey);
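
The reworked helper above threads the signature algorithm through to the JWS header, which is what lets the new tests mint RS512- and RS384-signed tokens. A condensed standalone version of that flow (claims trimmed to the essentials; issuer, subject, key and algorithm are assumed to be supplied by the caller):

  import java.security.interfaces.RSAPrivateKey;
  import java.util.Date;

  import com.nimbusds.jose.JWSAlgorithm;
  import com.nimbusds.jose.JWSHeader;
  import com.nimbusds.jose.JWSSigner;
  import com.nimbusds.jose.crypto.RSASSASigner;
  import com.nimbusds.jwt.JWTClaimsSet;
  import com.nimbusds.jwt.SignedJWT;

  public class SignedJwtSketch {
    static SignedJWT build(String issuer, String sub, Date expires,
                           RSAPrivateKey privateKey, String signatureAlgorithm)
        throws Exception {
      JWTClaimsSet claims = new JWTClaimsSet.Builder()
          .issuer(issuer)
          .subject(sub)
          .expirationTime(expires)
          .build();
      // The parsed algorithm name ("RS256", "RS512", ...) becomes the header's alg field.
      JWSHeader header = new JWSHeader.Builder(JWSAlgorithm.parse(signatureAlgorithm)).build();
      SignedJWT signedJWT = new SignedJWT(header, claims);
      JWSSigner signer = new RSASSASigner(privateKey);
      signedJWT.sign(signer);
      return signedJWT;
    }
  }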

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/SSOCookieProviderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/SSOCookieProviderTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/SSOCookieProviderTest.java
index 768755b..38e7381 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/SSOCookieProviderTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/SSOCookieProviderTest.java
@@ -30,6 +30,7 @@ import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.gateway.provider.federation.jwt.filter.AbstractJWTFilter;
 import org.apache.hadoop.gateway.provider.federation.jwt.filter.SSOCookieFederationFilter;
 import org.apache.hadoop.gateway.security.PrimaryPrincipal;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
@@ -70,8 +71,8 @@ public class SSOCookieProviderTest extends AbstractJWTFilterTest {
       props.put("sso.cookie.name", "jowt");
       handler.init(new TestFilterConfig(props));
 
-      SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000),
-          privateKey, props);
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000), privateKey);
 
       Cookie cookie = new Cookie("jowt", jwt.serialize());
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
index 33b86bd..0c33cdf 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityService.java
@@ -23,8 +23,10 @@ import java.security.PublicKey;
 import java.security.interfaces.RSAPrivateKey;
 import java.security.interfaces.RSAPublicKey;
 import java.util.Map;
+import java.util.Set;
 import java.util.List;
 import java.util.ArrayList;
+import java.util.HashSet;
 
 import javax.security.auth.Subject;
 
@@ -48,10 +50,22 @@ import com.nimbusds.jose.crypto.RSASSAVerifier;
 public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
 
   private static final String SIGNING_KEY_PASSPHRASE = "signing.key.passphrase";
+  private static final Set<String> SUPPORTED_SIG_ALGS = new HashSet<>();
   private AliasService as = null;
   private KeystoreService ks = null;
   String signingKeyAlias = null;
 
+  static {
+      // Only standard RSA signature algorithms are accepted
+      // https://tools.ietf.org/html/rfc7518
+      SUPPORTED_SIG_ALGS.add("RS256");
+      SUPPORTED_SIG_ALGS.add("RS384");
+      SUPPORTED_SIG_ALGS.add("RS512");
+      SUPPORTED_SIG_ALGS.add("PS256");
+      SUPPORTED_SIG_ALGS.add("PS384");
+      SUPPORTED_SIG_ALGS.add("PS512");
+  }
+
   public void setKeystoreService(KeystoreService ks) {
     this.ks = ks;
   }
@@ -96,7 +110,7 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
   @Override
   public JWT issueToken(Principal p, String audience, String algorithm, long expires)
       throws TokenServiceException {
-    ArrayList<String> audiences = null;
+    List<String> audiences = null;
     if (audience != null) {
       audiences = new ArrayList<String>();
       audiences.add(audience);
@@ -118,9 +132,9 @@ public class DefaultTokenAuthorityService implements JWTokenAuthority, Service {
       claimArray[3] = String.valueOf(expires);
     }
 
-    JWTToken token = null;
-    if ("RS256".equals(algorithm)) {
-      token = new JWTToken("RS256", claimArray, audiences);
+    JWT token = null;
+    if (SUPPORTED_SIG_ALGS.contains(algorithm)) {
+      token = new JWTToken(algorithm, claimArray, audiences);
       RSAPrivateKey key;
       char[] passphrase = null;
       try {
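
With the SUPPORTED_SIG_ALGS whitelist above, issuance proceeds for the six RSA-family algorithms from RFC 7518 and constructs no token for anything else, HMAC and EC algorithms included. A hedged illustration, assuming a configured service as in the tests further down; the exact failure mode for a rejected algorithm is outside this hunk:

  import java.security.Principal;

  import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
  import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
  import org.apache.hadoop.gateway.services.security.token.impl.JWT;

  public class WhitelistSketch {
    static void demo(JWTokenAuthority ta, Principal principal) throws TokenServiceException {
      JWT ps384 = ta.issueToken(principal, "PS384"); // whitelisted RSASSA-PSS algorithm
      JWT rs512 = ta.issueToken(principal, "RS512"); // whitelisted; exercised by the new test below
      // "HS256" is not in SUPPORTED_SIG_ALGS, so the same call with it yields no token.
    }
  }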

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
index 7cc9971..48616c0 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.gateway.services.security.KeystoreService;
 import org.apache.hadoop.gateway.services.security.MasterService;
 import org.apache.hadoop.gateway.services.security.impl.DefaultKeystoreService;
 import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
+import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
 import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 import org.easymock.EasyMock;
 import org.junit.Test;
@@ -74,6 +75,8 @@ public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
     JWT token = ta.issueToken(principal, "RS256");
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
+
+    assertTrue(ta.verifyToken(token));
   }
 
   @Test
@@ -115,6 +118,8 @@ public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
     assertEquals("https://login.example.com", token.getAudience());
+
+    assertTrue(ta.verifyToken(token));
   }
 
   @Test
@@ -155,6 +160,94 @@ public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
     JWT token = ta.issueToken(principal, null, "RS256");
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
+
+    assertTrue(ta.verifyToken(token));
+  }
+
+  @Test
+  public void testTokenCreationSignatureAlgorithm() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    JWT token = ta.issueToken(principal, "RS512");
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertTrue(token.getHeader().contains("RS512"));
+
+    assertTrue(ta.verifyToken(token));
+  }
+
+  @Test
+  public void testTokenCreationBadSignatureAlgorithm() throws Exception {
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+    GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+    String basedir = System.getProperty("basedir");
+    if (basedir == null) {
+      basedir = new File(".").getCanonicalPath();
+    }
+
+    EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+    EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+    EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+    MasterService ms = EasyMock.createNiceMock(MasterService.class);
+    EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+    AliasService as = EasyMock.createNiceMock(AliasService.class);
+    EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+    EasyMock.replay(principal, config, ms, as);
+
+    KeystoreService ks = new DefaultKeystoreService();
+    ((DefaultKeystoreService)ks).setMasterService(ms);
+
+    ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+    JWTokenAuthority ta = new DefaultTokenAuthorityService();
+    ((DefaultTokenAuthorityService)ta).setAliasService(as);
+    ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+    ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+    try {
+      ta.issueToken(principal, "none");
+      fail("Failure expected on a bad signature algorithm");
+    } catch (TokenServiceException ex) {
+      // expected
+    }
   }
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
index 70228d3..36aa075 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
@@ -60,6 +60,7 @@ public class WebSSOResource {
   private static final String SSO_COOKIE_DOMAIN_SUFFIX_PARAM = "knoxsso.cookie.domain.suffix";
   private static final String SSO_COOKIE_TOKEN_TTL_PARAM = "knoxsso.token.ttl";
   private static final String SSO_COOKIE_TOKEN_AUDIENCES_PARAM = "knoxsso.token.audiences";
+  private static final String SSO_COOKIE_TOKEN_SIG_ALG = "knoxsso.token.sigalg";
   private static final String SSO_COOKIE_TOKEN_WHITELIST_PARAM = "knoxsso.redirect.whitelist.regex";
   private static final String SSO_ENABLE_SESSION_PARAM = "knoxsso.enable.session";
   private static final String ORIGINAL_URL_REQUEST_PARAM = "originalUrl";
@@ -77,6 +78,7 @@ public class WebSSOResource {
   private String domainSuffix = null;
   private List<String> targetAudiences = new ArrayList<>();
   private boolean enableSession = false;
+  private String signatureAlgorithm = "RS256";
 
   @Context
   HttpServletRequest request;
@@ -143,6 +145,11 @@ public class WebSSOResource {
 
     String enableSession = context.getInitParameter(SSO_ENABLE_SESSION_PARAM);
     this.enableSession = ("true".equals(enableSession));
+
+    String sigAlg = context.getInitParameter(SSO_COOKIE_TOKEN_SIG_ALG);
+    if (sigAlg != null) {
+      signatureAlgorithm = sigAlg;
+    }
   }
 
   @GET
@@ -185,9 +192,9 @@ public class WebSSOResource {
     try {
       JWT token = null;
       if (targetAudiences.isEmpty()) {
-        token = ts.issueToken(p, "RS256", getExpiry());
+        token = ts.issueToken(p, signatureAlgorithm, getExpiry());
       } else {
-        token = ts.issueToken(p, targetAudiences, "RS256", getExpiry());
+        token = ts.issueToken(p, targetAudiences, signatureAlgorithm, getExpiry());
       }
 
       // Coverity CID 1327959

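WebSSOResource now honors an optional knoxsso.token.sigalg init parameter, keeping RS256 as the default when the parameter is absent; TokenResource below applies the same pattern under knox.token.sigalg. A hedged sketch of the read-with-default logic in isolation (the ServletContext would be injected by the container; the class name is illustrative):

import javax.servlet.ServletContext;

final class SignatureAlgorithmParam {

  private static final String SSO_COOKIE_TOKEN_SIG_ALG = "knoxsso.token.sigalg";
  private static final String DEFAULT_SIG_ALG = "RS256";

  // Returns the configured JWS algorithm, falling back to RS256.
  static String resolve(ServletContext context) {
    String sigAlg = context.getInitParameter(SSO_COOKIE_TOKEN_SIG_ALG);
    return (sigAlg != null) ? sigAlg : DEFAULT_SIG_ALG;
  }
}

Note that the parameter value is passed through to the token authority unvalidated here; the whitelist in DefaultTokenAuthorityService is what ultimately rejects an unsupported value.
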
http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index 568f0fe..516f9ae 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -166,7 +166,7 @@ public class WebSSOResourceTest {
     Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
     assertNotNull(cookie);
 
-    JWTToken parsedToken = new JWTToken(cookie.getValue());
+    JWT parsedToken = new JWTToken(cookie.getValue());
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
   }
@@ -218,7 +218,7 @@ public class WebSSOResourceTest {
     Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
     assertNotNull(cookie);
 
-    JWTToken parsedToken = new JWTToken(cookie.getValue());
+    JWT parsedToken = new JWTToken(cookie.getValue());
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
 
@@ -287,6 +287,60 @@ public class WebSSOResourceTest {
     assertTrue(audiences.contains("recipient2"));
   }
 
+  @Test
+  public void testSignatureAlgorithm() throws Exception {
+
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knoxsso.token.sigalg")).andReturn("RS512");
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+    EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+    CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+    EasyMock.replay(principal, services, context, request);
+
+    WebSSOResource webSSOResponse = new WebSSOResource();
+    webSSOResponse.request = request;
+    webSSOResponse.response = responseWrapper;
+    webSSOResponse.context = context;
+    webSSOResponse.init();
+
+    // Issue a token
+    webSSOResponse.doGet();
+
+    // Check the cookie
+    Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+    assertNotNull(cookie);
+
+    JWT parsedToken = new JWTToken(cookie.getValue());
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+    assertTrue(parsedToken.getHeader().contains("RS512"));
+  }
+
   /**
    * A wrapper for HttpServletResponseWrapper to store the cookies
    */
@@ -380,14 +434,9 @@ public class WebSSOResourceTest {
         claimArray[3] = String.valueOf(expires);
       }
 
-      JWTToken token = null;
-      if ("RS256".equals(algorithm)) {
-        token = new JWTToken("RS256", claimArray, audiences);
-        JWSSigner signer = new RSASSASigner(privateKey);
-        token.sign(signer);
-      } else {
-        throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
-      }
+      JWT token = new JWTToken(algorithm, claimArray, audiences);
+      JWSSigner signer = new RSASSASigner(privateKey);
+      token.sign(signer);
 
       return token;
     }

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index df8288a..afa6a3a 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -57,6 +57,7 @@ public class TokenResource {
   private static final String TOKEN_CLIENT_DATA = "knox.token.client.data";
   private static final String TOKEN_CLIENT_CERT_REQUIRED = "knox.token.client.cert.required";
   private static final String TOKEN_ALLOWED_PRINCIPALS = "knox.token.allowed.principals";
+  private static final String TOKEN_SIG_ALG = "knox.token.sigalg";
   static final String RESOURCE_PATH = "knoxtoken/api/v1/token";
   private static TokenServiceMessages log = MessagesFactory.get( TokenServiceMessages.class );
   private long tokenTTL = 30000l;
@@ -65,6 +66,7 @@ public class TokenResource {
   private Map<String,Object> tokenClientDataMap = null;
   private ArrayList<String> allowedDNs = new ArrayList<>();
   private boolean clientCertRequired = false;
+  private String signatureAlgorithm = "RS256";
 
   @Context
   HttpServletRequest request;
@@ -115,6 +117,11 @@ public class TokenResource {
       String[] tokenClientData = clientData.split(",");
       addClientDataToMap(tokenClientData, tokenClientDataMap);
     }
+
+    String sigAlg = context.getInitParameter(TOKEN_SIG_ALG);
+    if (sigAlg != null) {
+      signatureAlgorithm = sigAlg;
+    }
   }
 
   @GET
@@ -159,9 +166,9 @@ public class TokenResource {
     try {
       JWT token = null;
       if (targetAudiences.isEmpty()) {
-        token = ts.issueToken(p, "RS256", expires);
+        token = ts.issueToken(p, signatureAlgorithm, expires);
       } else {
-        token = ts.issueToken(p, targetAudiences, "RS256", expires);
+        token = ts.issueToken(p, targetAudiences, signatureAlgorithm, expires);
       }
 
       if (token != null) {

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index 0046bd9..80f359d 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -101,11 +101,8 @@ public class TokenServiceResourceTest {
 
   @Test
   public void testGetToken() throws Exception {
-    TokenResource tr = new TokenResource();
 
     ServletContext context = EasyMock.createNiceMock(ServletContext.class);
-    //tr.context = context;
-    // tr.init();
 
     HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
     EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
@@ -126,6 +123,7 @@ public class TokenServiceResourceTest {
 
     EasyMock.replay(principal, services, context, request, response);
 
+    TokenResource tr = new TokenResource();
     tr.request = request;
     tr.response = response;
 
@@ -142,7 +140,7 @@ public class TokenServiceResourceTest {
     assertNotNull(expiry);
 
     // Verify the token
-    JWTToken parsedToken = new JWTToken(accessToken);
+    JWT parsedToken = new JWTToken(accessToken);
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
   }
@@ -194,7 +192,7 @@ public class TokenServiceResourceTest {
     assertNotNull(expiry);
 
     // Verify the token
-    JWTToken parsedToken = new JWTToken(accessToken);
+    JWT parsedToken = new JWTToken(accessToken);
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
 
@@ -252,7 +250,7 @@ public class TokenServiceResourceTest {
     assertNotNull(expiry);
 
     // Verify the token
-    JWTToken parsedToken = new JWTToken(accessToken);
+    JWT parsedToken = new JWTToken(accessToken);
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
 
@@ -315,7 +313,7 @@ public class TokenServiceResourceTest {
     assertNotNull(expiry);
 
     // Verify the token
-    JWTToken parsedToken = new JWTToken(accessToken);
+    JWT parsedToken = new JWTToken(accessToken);
     assertEquals("alice", parsedToken.getSubject());
     assertTrue(authority.verifyToken(parsedToken));
   }
@@ -405,6 +403,59 @@ public class TokenServiceResourceTest {
     assertEquals(403, retResponse.getStatus());
   }
 
+  @Test
+  public void testSignatureAlgorithm() throws Exception {
+    ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+    EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn("recipient1,recipient2");
+    EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
+    EasyMock.expect(context.getInitParameter("knox.token.sigalg")).andReturn("RS512");
+
+    HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+    EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+    Principal principal = EasyMock.createNiceMock(Principal.class);
+    EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+    EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+    GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+    EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+    JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+    EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+    StringWriter writer = new StringWriter();
+    PrintWriter printWriter = new PrintWriter(writer);
+    HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+    EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+    EasyMock.replay(principal, services, context, request, response);
+
+    TokenResource tr = new TokenResource();
+    tr.request = request;
+    tr.response = response;
+    tr.context = context;
+    tr.init();
+
+    // Issue a token
+    Response retResponse = tr.doGet();
+
+    assertEquals(200, retResponse.getStatus());
+
+    // Parse the response
+    String retString = writer.toString();
+    String accessToken = getTagValue(retString, "access_token");
+    assertNotNull(accessToken);
+    String expiry = getTagValue(retString, "expires_in");
+    assertNotNull(expiry);
+
+    // Verify the token
+    JWT parsedToken = new JWTToken(accessToken);
+    assertEquals("alice", parsedToken.getSubject());
+    assertTrue(authority.verifyToken(parsedToken));
+    assertTrue(parsedToken.getHeader().contains("RS512"));
+  }
+
   private String getTagValue(String token, String tagName) {
     String searchString = tagName + "\":";
     String value = token.substring(token.indexOf(searchString) + searchString.length());
@@ -479,14 +530,9 @@ public class TokenServiceResourceTest {
         claimArray[3] = String.valueOf(expires);
       }
 
-      JWTToken token = null;
-      if ("RS256".equals(algorithm)) {
-        token = new JWTToken("RS256", claimArray, audiences);
-        JWSSigner signer = new RSASSASigner(privateKey);
-        token.sign(signer);
-      } else {
-        throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
-      }
+      JWT token = new JWTToken(algorithm, claimArray, audiences);
+      JWSSigner signer = new RSASSASigner(privateKey);
+      token.sign(signer);
 
       return token;
     }

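Both new signature-algorithm tests assert on the serialized header with token.getHeader().contains("RS512"); a consumer holding only the compact token string can make the same check more precisely with the Nimbus classes already on the classpath. A sketch, where accessToken is assumed to hold the serialized JWT:

import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jwt.SignedJWT;

final class TokenHeaderCheck {

  // Parses the compact serialization and inspects the JWS header's alg value.
  static boolean isSignedWith(String accessToken, JWSAlgorithm expected) throws Exception {
    SignedJWT jwt = SignedJWT.parse(accessToken);
    return expected.equals(jwt.getHeader().getAlgorithm());
  }
}

isSignedWith(accessToken, JWSAlgorithm.RS512) would then confirm the knox.token.sigalg setting took effect without relying on substring matching.
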
http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index 567c156..be2a331 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -17,14 +17,11 @@
    */
 package org.apache.hadoop.gateway.services.security.token.impl;
 
-import java.io.UnsupportedEncodingException;
 import java.text.ParseException;
 import java.util.Date;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
-import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 
 import com.nimbusds.jose.JOSEException;

http://git-wip-us.apache.org/repos/asf/knox/blob/9c7aa7e1/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
index 6372f0c..d971eca 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -22,9 +22,12 @@ import java.security.KeyPairGenerator;
 import java.security.NoSuchAlgorithmException;
 import java.security.interfaces.RSAPrivateKey;
 import java.security.interfaces.RSAPublicKey;
+import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.Date;
+import java.util.List;
 
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import com.nimbusds.jose.JWSAlgorithm;
@@ -37,10 +40,11 @@ public class JWTTokenTest extends org.junit.Assert {
   private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
   private static final String HEADER = "{\"typ\":\"JWT\",\"alg\":\"HS256\"}";
 
-  private RSAPublicKey publicKey;
-  private RSAPrivateKey privateKey;
+  private static RSAPublicKey publicKey;
+  private static RSAPrivateKey privateKey;
 
-  public JWTTokenTest() throws Exception, NoSuchAlgorithmException {
+  @BeforeClass
+  public static void setup() throws Exception, NoSuchAlgorithmException {
     KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
     kpg.initialize(2048);
 
@@ -64,7 +68,7 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = "https://login.example.com";
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
+    JWT token = new JWTToken("RS256", claims);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -78,10 +82,10 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = null;
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
+    List<String> audiences = new ArrayList<String>();
     audiences.add("https://login.example.com");
 
-    JWTToken token = new JWTToken("RS256", claims, audiences);
+    JWT token = new JWTToken("RS256", claims, audiences);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -96,11 +100,11 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = null;
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
+    List<String> audiences = new ArrayList<String>();
     audiences.add("https://login.example.com");
     audiences.add("KNOXSSO");
 
-    JWTToken token = new JWTToken("RS256", claims, audiences);
+    JWT token = new JWTToken("RS256", claims, audiences);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -134,9 +138,9 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = null;
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = null;
+    List<String> audiences = null;
 
-    JWTToken token = new JWTToken("RS256", claims, audiences);
+    JWT token = new JWTToken("RS256", claims, audiences);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -166,8 +170,7 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = "https://login.example.com";
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
-
+    JWT token = new JWTToken("RS256", claims);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -190,7 +193,7 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = "https://login.example.com";
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
+    JWT token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
 
     assertEquals("KNOXSSO", token.getIssuer());
     assertEquals("john.doe@example.com", token.getSubject());
@@ -214,10 +217,24 @@ public class JWTTokenTest extends org.junit.Assert {
     claims[1] = "john.doe@example.com";
     claims[2] = "https://login.example.com";
     claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
+    JWT token = new JWTToken("RS256", claims);
 
     assertNotNull(token.getExpires());
     assertNotNull(token.getExpiresDate());
     assertEquals(token.getExpiresDate(), new Date(Long.valueOf(token.getExpires())));
   }
+
+  @Test
+  public void testUnsignedToken() throws Exception {
+    String unsignedToken = "eyJhbGciOiJub25lIn0.eyJzdWIiOiJhbGljZSIsImp0aSI6ImY2YmNj"
+        + "MDVjLWI4MTktNGM0Mi1iMGMyLWJlYmY1MDE4YWFiZiJ9.";
+
+    try {
+      new JWTToken(unsignedToken);
+      fail("Failure expected on an unsigned token");
+    } catch (ParseException ex) {
+      // expected
+    }
+  }
+
 }

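The unsigned token in testUnsignedToken carries {"alg":"none"} base64url-encoded as its header and an empty signature segment; rejecting it at parse time closes the classic alg=none bypass, where a verifier that trusts the header would accept an unsigned assertion. Decoding the header makes this visible (plain JDK, no Knox classes involved):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class UnsignedHeaderDemo {
  public static void main(String[] args) {
    String header = "eyJhbGciOiJub25lIn0";
    // Prints {"alg":"none"} - the header segment the parser must refuse.
    System.out.println(new String(
        Base64.getUrlDecoder().decode(header), StandardCharsets.UTF_8));
  }
}
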

[11/37] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c2ca4432
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c2ca4432
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c2ca4432

Branch: refs/heads/KNOX-1049
Commit: c2ca443262a848ae0d56e03e92ecba32fbf149f2
Parents: 3a0119b
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sat Sep 23 11:04:39 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sat Sep 23 11:04:39 2017 -0400

----------------------------------------------------------------------
 b/gateway-discovery-ambari/pom.xml              |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 b/gateway-release/home/conf/descriptors/README  |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-discovery-ambari/pom.xml                |  66 ++
 .../discovery/ambari/AmbariCluster.java         | 114 +++
 .../discovery/ambari/AmbariComponent.java       |  76 ++
 .../ambari/AmbariServiceDiscovery.java          | 291 +++++++
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 ++
 .../ambari/AmbariServiceDiscoveryType.java      |  35 +
 .../ambari/AmbariServiceURLCreator.java         | 184 ++++
 ...eway.topology.discovery.ServiceDiscoveryType |  19 +
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 +++++++++++++++++++
 gateway-release/home/conf/descriptors/README    |   1 +
 .../home/conf/shared-providers/README           |   1 +
 gateway-release/pom.xml                         |   4 +
 gateway-server/pom.xml                          |   5 +
 .../apache/hadoop/gateway/GatewayMessages.java  |   9 +-
 .../services/DefaultGatewayServices.java        |   3 +-
 .../topology/impl/DefaultTopologyService.java   | 278 +++++-
 .../builder/BeanPropertyTopologyBuilder.java    |   2 +-
 .../DefaultServiceDiscoveryConfig.java          |  48 ++
 .../discovery/ServiceDiscoveryFactory.java      |  81 ++
 .../topology/simple/SimpleDescriptor.java       |  46 +
 .../simple/SimpleDescriptorFactory.java         |  71 ++
 .../simple/SimpleDescriptorHandler.java         | 186 ++++
 .../topology/simple/SimpleDescriptorImpl.java   | 111 +++
 .../simple/SimpleDescriptorMessages.java        |  44 +
 .../topology/DefaultTopologyServiceTest.java    |  70 +-
 .../PropertiesFileServiceDiscoveryTest.java     |  90 ++
 .../discovery/ServiceDiscoveryFactoryTest.java  |  81 ++
 .../test/extension/DummyServiceDiscovery.java   |  66 ++
 .../extension/DummyServiceDiscoveryType.java    |  32 +
 .../PropertiesFileServiceDiscovery.java         | 108 +++
 .../PropertiesFileServiceDiscoveryType.java     |  35 +
 .../extension/SneakyServiceDiscoveryImpl.java   |  40 +
 .../extension/SneakyServiceDiscoveryType.java   |  33 +
 .../simple/SimpleDescriptorFactoryTest.java     | 218 +++++
 .../simple/SimpleDescriptorHandlerTest.java     | 239 ++++++
 ...eway.topology.discovery.ServiceDiscoveryType |  21 +
 .../topology/file/ambari-cluster-policy.xml     |  74 ++
 .../topology/file/simple-topology-four.json     |  18 +
 .../topology/discovery/GatewayService.java      |  29 +
 .../topology/discovery/ServiceDiscovery.java    |  76 ++
 .../discovery/ServiceDiscoveryConfig.java       |  42 +
 .../discovery/ServiceDiscoveryType.java         |  40 +
 pom.xml                                         |  27 +-
 54 files changed, 5626 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/pom.xml b/b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.containsKey(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}

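AmbariCluster keeps a two-level map keyed by service name and then configuration type, so one service can carry several current configuration documents at once. A hedged usage sketch under stated assumptions (illustrative cluster name and property values; it must live in the same package because AmbariCluster and its members are package-private):

package org.apache.hadoop.gateway.topology.discovery.ambari;

import java.util.HashMap;
import java.util.Map;

class AmbariClusterUsageSketch {

  static String lookupHivePort() {
    AmbariCluster cluster = new AmbariCluster("Sandbox");   // cluster name is illustrative

    Map<String, String> props = new HashMap<>();
    props.put("hive.server2.thrift.http.port", "10001");    // sample hive-site property
    cluster.addServiceConfiguration("HIVE", "hive-site",
        new AmbariCluster.ServiceConfiguration("hive-site", "1", props));

    // Lookups resolve the service name first, then the configuration type.
    AmbariCluster.ServiceConfiguration sc =
        cluster.getServiceConfiguration("HIVE", "hive-site");
    return sc.getProperties().get("hive.server2.thrift.http.port");
  }
}
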
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                AmbariComponent c = new AmbariComponent(componentName,
+                                                        svcConfig.getVersion(),
+                                                        clusterName,
+                                                        serviceName,
+                                                        hostNames,
+                                                        svcConfig.getProperties());
+                cluster.addComponent(c);
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, then use the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse(EntityUtils.toString(entity));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}

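invokeREST resolves credentials in layers (explicit configuration first, then the ambari.discovery.user and ambari.discovery.password aliases) and finally issues a plain Basic-auth GET. The HTTP portion reduces to the following sketch using the same HttpClient and commons-codec APIs (URL and credentials are placeholders):

import java.nio.charset.StandardCharsets;

import org.apache.commons.codec.binary.Base64;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

public class BasicAuthGetSketch {
  public static void main(String[] args) throws Exception {
    String url = "http://ambari.example.com:8080/api/v1/clusters";  // placeholder address
    String creds = Base64.encodeBase64String(
        "admin:admin-password".getBytes(StandardCharsets.UTF_8));   // placeholder credentials

    try (CloseableHttpClient client = HttpClients.createDefault()) {
      HttpGet request = new HttpGet(url);
      request.addHeader("Authorization", "Basic " + creds);
      try (CloseableHttpResponse response = client.execute(request)) {
        // A 200 response carries the JSON body that invokeREST hands to JSONValue.parse
        System.out.println(response.getStatusLine().getStatusCode());
        System.out.println(EntityUtils.toString(response.getEntity()));
      }
    }
  }
}

One difference from the patch is deliberate: the sketch encodes the credentials as UTF-8 explicitly, whereas the code above calls getBytes() with the platform default charset.
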
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}
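
This interface follows Knox's i18n convention: @Messages/@Message describe the log text and level, and a generated implementation is obtained from MessagesFactory, so callers log through typed methods rather than format strings. A sketch of the expected usage (the class and method here are illustrative; MessagesFactory.get(...) is the pattern used elsewhere in the codebase):

    import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

    // Assumes the same package as the interface above
    class DiscoveryLoggingSketch {
        // MessagesFactory returns a generated implementation of the annotated interface
        private static final AmbariServiceDiscoveryMessages log =
                MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

        void reportBadStatus(String url, int status) {
            // Renders "Unexpected REST invocation response code for {0} : {1}" at ERROR level
            log.unexpectedRestResponseStatusCode(url, status);
        }
    }
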

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // TODO: scheme for the binary transport mode is unknown; a null scheme yields a malformed URL below
+            if ("http".equals(transport)) {
+                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
+            }
+
+            result.add(scheme + "://" + host + ":" + port + "/" + path);
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
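
One detail worth noting when calling this class: create() returns null, not an empty list, for service names it does not recognize, so the guard is the caller's responsibility. A short usage sketch (the cluster instance would come from AmbariServiceDiscovery.discover(...); this wrapper is illustrative):

    import java.util.Collections;
    import java.util.List;

    // Assumes the same package as AmbariServiceURLCreator and AmbariCluster
    class UrlCreatorSketch {
        static List<String> urlsFor(AmbariCluster cluster, String serviceName) {
            List<String> urls = new AmbariServiceURLCreator().create(cluster, serviceName);
            // create() yields null for unrecognized roles, so normalize to an empty list
            return (urls != null) ? urls : Collections.<String>emptyList();
        }
    }
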

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
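
This file registers the provider with the JDK's ServiceLoader mechanism: any jar on the classpath that contributes a line to META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType becomes discoverable at runtime. A sketch of how a consumer could resolve a discovery implementation by type name (the exact value of AmbariServiceDiscovery.TYPE is defined in the discovery class; this lookup is illustrative, not necessarily Knox's actual factory code):

    import java.util.ServiceLoader;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

    class DiscoveryLookupSketch {
        // Returns a ServiceDiscovery for the requested type name, or null if no
        // provider on the classpath declares that type.
        static ServiceDiscovery forType(String typeName) {
            for (ServiceDiscoveryType t : ServiceLoader.load(ServiceDiscoveryType.class)) {
                if (t.getType().equalsIgnoreCase(typeName)) {
                    return t.newInstance(); // fresh instance per call
                }
            }
            return null;
        }
    }
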


[27/37] knox git commit: KNOX-1079 - Regression: proxy for Atlas fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)

Posted by lm...@apache.org.
KNOX-1079 - Regression: proxy for Atlas fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/0719da37
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/0719da37
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/0719da37

Branch: refs/heads/KNOX-1049
Commit: 0719da376065950d8786576cf92f71367d9aba8d
Parents: 2d236e7
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 12 15:03:16 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 12 15:03:16 2017 -0400

----------------------------------------------------------------------
 .../src/main/resources/services/atlas/0.8.0/rewrite.xml        | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/0719da37/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
index 9db2348..84b4edf 100644
--- a/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
+++ b/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
@@ -66,6 +66,10 @@
 
 
     <filter name="ATLAS/atlas/outbound/links">
+        <content type="*/x-javascript">
+            <apply path="j_spring_security_check" rule="ATLAS/atlas/outbound/extrapath"/>
+            <apply path="index.html" rule="ATLAS/atlas/outbound/index"/>
+        </content>
         <content type="application/javascript">
             <apply path="j_spring_security_check" rule="ATLAS/atlas/outbound/extrapath"/>
             <apply path="index.html" rule="ATLAS/atlas/outbound/index"/>
@@ -80,4 +84,4 @@
         </content>
     </filter>
 
-</rules>
\ No newline at end of file
+</rules>


[15/37] knox git commit: KNOX-1068 - Add support for HTTP Head request

Posted by lm...@apache.org.
KNOX-1068 - Add support for HTTP Head request


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/10b3473a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/10b3473a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/10b3473a

Branch: refs/heads/KNOX-1049
Commit: 10b3473ae0ab88645322ef4798f9eb696cb59b93
Parents: a841e26
Author: Sandeep More <mo...@apache.org>
Authored: Tue Sep 26 14:24:30 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Tue Sep 26 14:24:30 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/dispatch/AbstractGatewayDispatch.java    | 8 ++++++++
 .../org/apache/hadoop/gateway/dispatch/DefaultDispatch.java | 9 +++++++++
 .../java/org/apache/hadoop/gateway/dispatch/Dispatch.java   | 6 ++++++
 .../hadoop/gateway/dispatch/GatewayDispatchFilter.java      | 8 ++++++++
 4 files changed, 31 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/10b3473a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
index 70a6996..16a09fc 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/AbstractGatewayDispatch.java
@@ -115,6 +115,14 @@ public abstract class AbstractGatewayDispatch implements Dispatch {
       throws IOException, URISyntaxException {
     response.sendError( HttpServletResponse.SC_METHOD_NOT_ALLOWED );
   }
+
+  /**
+   * @since 0.14.0
+   */
+  public void doHead( URI url, HttpServletRequest request, HttpServletResponse response )
+      throws IOException, URISyntaxException {
+    response.sendError( HttpServletResponse.SC_METHOD_NOT_ALLOWED );
+  }
   
   public void copyRequestHeaderFields(HttpUriRequest outboundRequest,
       HttpServletRequest inboundRequest) {

http://git-wip-us.apache.org/repos/asf/knox/blob/10b3473a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
index 4b9e95d..d3633a9 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/DefaultDispatch.java
@@ -36,6 +36,7 @@ import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
 import org.apache.http.client.methods.HttpOptions;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
@@ -312,6 +313,14 @@ public class DefaultDispatch extends AbstractGatewayDispatch {
       executeRequest(method, request, response);
    }
 
+  @Override
+  public void doHead(URI url, HttpServletRequest request, HttpServletResponse response)
+      throws IOException, URISyntaxException {
+    final HttpHead method = new HttpHead(url);
+    copyRequestHeaderFields(method, request);
+    executeRequest(method, request, response);
+  }
+
   public Set<String> getOutboundResponseExcludeHeaders() {
     return outboundResponseExcludeHeaders;
   }

http://git-wip-us.apache.org/repos/asf/knox/blob/10b3473a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
index de08117..506e22b 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/Dispatch.java
@@ -53,4 +53,10 @@ public interface Dispatch {
   void doOptions( URI url, HttpServletRequest request, HttpServletResponse response )
       throws IOException, ServletException, URISyntaxException;
 
+  /**
+   * @since 0.14.0
+   */
+  void doHead( URI url, HttpServletRequest request, HttpServletResponse response )
+      throws IOException, ServletException, URISyntaxException;
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/10b3473a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
index acfa92e..4799ec2 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/dispatch/GatewayDispatchFilter.java
@@ -54,6 +54,7 @@ public class GatewayDispatchFilter extends AbstractGatewayFilter {
     map.put("PUT", new PutAdapter());
     map.put("DELETE", new DeleteAdapter());
     map.put("OPTIONS", new OptionsAdapter());
+    map.put("HEAD", new HeadAdapter());
     return Collections.unmodifiableMap(map);
   }
 
@@ -152,6 +153,13 @@ public class GatewayDispatchFilter extends AbstractGatewayFilter {
     }
   }
 
+  private static class HeadAdapter implements Adapter {
+    public void doMethod(Dispatch dispatch, HttpServletRequest request, HttpServletResponse response)
+        throws IOException, ServletException, URISyntaxException {
+      dispatch.doHead( dispatch.getDispatchUrl(request), request, response);
+    }
+  }
+
   private <T> T newInstanceFromName(String dispatchImpl) throws ServletException {
     try {
       Class<T> clazz = loadClass(dispatchImpl);
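
The change follows the filter's existing shape: HTTP verbs are routed through an immutable method-to-adapter map, so supporting HEAD is just one new map entry plus one new adapter. A stripped-down illustration of that routing pattern (hypothetical Handler interface; the real adapters receive the dispatch, request, and response through doMethod, and the real map also carries GET, POST, PUT, DELETE, and OPTIONS):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class MethodRoutingSketch {
        interface Handler { void handle(); }

        private static final Map<String, Handler> HANDLERS = createHandlers();

        private static Map<String, Handler> createHandlers() {
            Map<String, Handler> map = new HashMap<>();
            map.put("GET",  () -> System.out.println("doGet"));
            map.put("HEAD", () -> System.out.println("doHead")); // the new entry
            return Collections.unmodifiableMap(map);
        }

        public static void main(String[] args) {
            HANDLERS.get("HEAD").handle(); // prints "doHead"
        }
    }
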


[05/37] knox git commit: KNOX-1060 - JWT.getExpires() returns null

Posted by lm...@apache.org.
KNOX-1060 - JWT.getExpires() returns null


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8537d424
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8537d424
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8537d424

Branch: refs/heads/KNOX-1049
Commit: 8537d424205dce5b032bbb4c37362d91dd3cfeb5
Parents: 935f81f
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 22 11:10:59 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Sep 22 11:10:59 2017 +0100

----------------------------------------------------------------------
 .../services/security/token/impl/JWTToken.java        |  8 +++++++-
 .../services/security/token/impl/JWTTokenTest.java    | 14 ++++++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/8537d424/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index b7b8649..567c156 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -22,6 +22,8 @@ import java.text.ParseException;
 import java.util.Date;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 
@@ -214,7 +216,11 @@ public class JWTToken implements JWT {
    */
   @Override
   public String getExpires() {
-    return getClaim(JWT.EXPIRES);
+    Date expires = getExpiresDate();
+    if (expires != null) {
+      return String.valueOf(expires.getTime());
+    }
+    return null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/knox/blob/8537d424/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
index 4ed2ecf..6372f0c 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -23,6 +23,7 @@ import java.security.NoSuchAlgorithmException;
 import java.security.interfaces.RSAPrivateKey;
 import java.security.interfaces.RSAPublicKey;
 import java.util.ArrayList;
+import java.util.Date;
 
 import org.junit.Test;
 
@@ -206,4 +207,17 @@ public class JWTTokenTest extends org.junit.Assert {
     assertTrue(token.verify(verifier));
   }
 
+  @Test
+  public void testTokenExpiry() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+    assertNotNull(token.getExpires());
+    assertNotNull(token.getExpiresDate());
+    assertEquals(token.getExpiresDate(), new Date(Long.valueOf(token.getExpires())));
+  }
 }
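
The fix hinges on a units detail: the JWT "exp" claim is expressed in seconds since the epoch, while java.util.Date, which the underlying library hands back for the expiration time, carries milliseconds. Deriving the string from getExpiresDate() keeps the two views consistent, which is what the new test asserts. A small worked illustration of the round trip:

    import java.util.Date;

    class ExpiryClaimSketch {
        public static void main(String[] args) {
            long expSeconds = System.currentTimeMillis() / 1000 + 300; // an "exp" claim value, in seconds
            Date expires = new Date(expSeconds * 1000L);               // Date wants milliseconds

            // Mirrors the new getExpires(): derive the string from the Date
            String expiresString = String.valueOf(expires.getTime());

            // Round-trips cleanly, as testTokenExpiry() asserts
            System.out.println(new Date(Long.valueOf(expiresString)).equals(expires)); // true
        }
    }
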


[04/37] knox git commit: KNOX-1058 - Fix JWTToken.parseToken

Posted by lm...@apache.org.
KNOX-1058 - Fix JWTToken.parseToken


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/935f81fb
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/935f81fb
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/935f81fb

Branch: refs/heads/KNOX-1049
Commit: 935f81fb0f446a18eb09d5c710f679e4012a7cc1
Parents: c833bf9
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 22 10:36:27 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Sep 22 10:36:27 2017 +0100

----------------------------------------------------------------------
 .../services/security/token/impl/JWTToken.java      | 16 +++-------------
 .../services/security/token/impl/JWTTokenTest.java  | 10 +++-------
 2 files changed, 6 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/935f81fb/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index 49d8609..b7b8649 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -40,13 +40,8 @@ public class JWTToken implements JWT {
 
   SignedJWT jwt = null;
 
-  private JWTToken(byte[] header, byte[] claims, byte[] signature) throws ParseException {
-    try {
-      jwt = new SignedJWT(new Base64URL(new String(header, "UTF8")), new Base64URL(new String(claims, "UTF8")),
-          new Base64URL(new String(signature, "UTF8")));
-    } catch (UnsupportedEncodingException e) {
-      log.unsupportedEncoding(e);
-    }
+  private JWTToken(String header, String claims, String signature) throws ParseException {
+    jwt = new SignedJWT(new Base64URL(header), new Base64URL(claims), new Base64URL(signature));
   }
 
   public JWTToken(String serializedJWT) throws ParseException {
@@ -147,12 +142,7 @@ public class JWTToken implements JWT {
   public static JWTToken parseToken(String wireToken) throws ParseException {
     log.parsingToken(wireToken);
     String[] parts = wireToken.split("\\.");
-    JWTToken jwt = new JWTToken(Base64.decodeBase64(parts[0]), Base64.decodeBase64(parts[1]), Base64.decodeBase64(parts[2]));
-//    System.out.println("header: " + token.header);
-//    System.out.println("claims: " + token.claims);
-//    System.out.println("payload: " + new String(token.payload));
-
-    return jwt;
+    return new JWTToken(parts[0], parts[1], parts[2]);
   }
 
   /* (non-Javadoc)

http://git-wip-us.apache.org/repos/asf/knox/blob/935f81fb/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
index ef4023d..4ed2ecf 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -34,8 +34,7 @@ import com.nimbusds.jose.crypto.RSASSAVerifier;
 
 public class JWTTokenTest extends org.junit.Assert {
   private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
-  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
-  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
+  private static final String HEADER = "{\"typ\":\"JWT\",\"alg\":\"HS256\"}";
 
   private RSAPublicKey publicKey;
   private RSAPrivateKey privateKey;
@@ -49,15 +48,12 @@ public class JWTTokenTest extends org.junit.Assert {
     privateKey = (RSAPrivateKey) kp.getPrivate();
   }
 
+  @Test
   public void testTokenParsing() throws Exception {
     JWTToken token = JWTToken.parseToken(JWT_TOKEN);
     assertEquals(token.getHeader(), HEADER);
-    assertEquals(token.getClaims(), CLAIMS);
 
-    assertEquals(token.getIssuer(), "gateway");
-    assertEquals(token.getPrincipal(), "john.doe@example.com");
-    assertEquals(token.getAudience(), "https://login.example.com");
-    assertEquals(token.getExpires(), "1363360913");
+    assertEquals(token.getClaim("jti"), "aa7f8d0a95c");
   }
 
   @Test
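
The underlying bug: the three dot-separated segments of a compact JWS are already Base64URL-encoded text, so they can be handed to SignedJWT's Base64URL constructor as-is. The old code Base64-decoded each segment first and then re-wrapped the decoded bytes, corrupting the token. A minimal illustration of the splitting step (the token below is truncated and for demonstration only):

    class CompactJwsSketch {
        public static void main(String[] args) {
            // A compact JWS is header.claims.signature, each segment Base64URL-encoded
            String wireToken = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDl9.sig";
            String[] parts = wireToken.split("\\.");
            // parts[0] = header, parts[1] = claims, parts[2] = signature -- no decoding needed
            System.out.println(parts.length); // 3
        }
    }
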


[13/37] knox git commit: KNOX-1014 - remove extraneous directory

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
deleted file mode 100644
index 1e5e7b2..0000000
--- a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ /dev/null
@@ -1,856 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.Assert.*;
-
-
-/**
- * Test the Ambari ServiceDiscovery implementation.
- *
- * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
- *      treatment of the responses as they were observed at the time the tests are developed.
- */
-public class AmbariServiceDiscoveryTest {
-
-    @Test
-    public void testSingleClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "testCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster);
-    }
-
-
-    @Test
-    public void testBulkClusterDiscovery() throws Exception {
-        final String discoveryAddress = "http://ambarihost:8080";
-        final String clusterName = "anotherCluster";
-        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
-
-        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
-        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
-        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
-        EasyMock.replay(sdc);
-
-        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
-        assertNotNull(clusters);
-        assertEquals(1, clusters.size());
-        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
-        assertNotNull(cluster);
-        assertEquals(clusterName, cluster.getName());
-        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
-        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
-
-//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
-        final String[] services = new String[]{"NAMENODE",
-                                               "JOBTRACKER",
-                                               "WEBHDFS",
-                                               "WEBHCAT",
-                                               "OOZIE",
-                                               "WEBHBASE",
-                                               "HIVE",
-                                               "RESOURCEMANAGER"};
-        printServiceURLs(cluster, services);
-    }
-
-
-    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
-        for (String name : services) {
-            StringBuilder sb = new StringBuilder();
-            List<String> urls = cluster.getServiceURLs(name);
-            if (urls != null && !urls.isEmpty()) {
-                for (String url : urls) {
-                    sb.append(url);
-                    sb.append(" ");
-                }
-            }
-            System.out.println(String.format("%18s: %s", name, sb.toString()));
-        }
-    }
-
-
-    /**
-     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
-     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
-     */
-    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
-
-        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
-
-        private Map<String, JSONObject> cannedResponses = new HashMap<>();
-
-        TestAmbariServiceDiscovery(String clusterName) {
-            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
-                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                               clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
-                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                clusterName)));
-
-            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
-                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
-                                                                                                     clusterName)));
-        }
-
-        @Override
-        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-            return cannedResponses.get(url.substring(url.indexOf("/api")));
-        }
-    }
-
-
-    ////////////////////////////////////////////////////////////////////////
-    //  JSON response templates, based on actual response content excerpts
-    ////////////////////////////////////////////////////////////////////////
-
-    private static final String CLUSTERS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"Clusters\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"version\" : \"HDP-2.6\"\n" +
-    "      }\n" +
-    "    }\n" +
-    "  ]" +
-    "}";
-
-
-    private static final String HOSTROLES_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HBASE_MASTER\",\n" +
-    "            \"service_name\" : \"HBASE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HBASE_MASTER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HBASE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HDFS\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NAMENODE\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "            \"service_name\" : \"HDFS\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HDFS\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"HIVE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HCAT\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HCAT\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"HIVE_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"HIVE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "            \"service_name\" : \"HIVE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"HIVE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\",\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"OOZIE\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "            \"service_name\" : \"OOZIE\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"OOZIE\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"YARN\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"NODEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"NODEMANAGER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "            \"service_name\" : \"YARN\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
-    "                \"ha_state\" : \"ACTIVE\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"YARN\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
-    "      \"ServiceInfo\" : {\n" +
-    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "        \"service_name\" : \"ZOOKEEPER\"\n" +
-    "      },\n" +
-    "      \"components\" : [\n" +
-    "        {\n" +
-    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
-    "          \"ServiceComponentInfo\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "            \"service_name\" : \"ZOOKEEPER\"\n" +
-    "          },\n" +
-    "          \"host_components\" : [\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            },\n" +
-    "            {\n" +
-    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
-    "              \"HostRoles\" : {\n" +
-    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
-    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
-    "                \"service_name\" : \"ZOOKEEPER\",\n" +
-    "                \"stack_id\" : \"HDP-2.6\"\n" +
-    "              }\n" +
-    "            }\n" +
-    "          ]\n" +
-    "        }\n" +
-    "      ]\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}\n";
-
-
-    private static final String SERVICECONFIGS_JSON_TEMPLATE =
-    "{\n" +
-    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
-    "  \"items\" : [\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hbase-site\",\n" +
-    "          \"tag\" : \"version1503410563715\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
-    "            \"hbase.master.info.port\" : \"16010\",\n" +
-    "            \"hbase.master.port\" : \"16000\",\n" +
-    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
-    "            \"hbase.regionserver.port\" : \"16020\",\n" +
-    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
-    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
-    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
-    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
-    "      \"service_name\" : \"HBASE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hdfs-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
-    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
-    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
-    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
-    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
-    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"dfs.https.port\" : \"50470\",\n" +
-    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
-    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
-    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
-    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
-    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
-    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
-    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
-    "              \"dfs.namenode.http-address\" : \"true\",\n" +
-    "              \"dfs.support.append\" : \"true\",\n" +
-    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
-    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
-    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"core-site\",\n" +
-    "          \"tag\" : \"version1502131215159\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
-    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"final\" : {\n" +
-    "              \"fs.defaultFS\" : \"true\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 2,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HDFS\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-env\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive_security_authorization\" : \"None\",\n" +
-    "            \"webhcat_user\" : \"hcat\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hiveserver2-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
-    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
-    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
-    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
-    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
-    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
-    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
-    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
-    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
-    "            \"hive.server2.webui.port\" : \"10502\",\n" +
-    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-interactive-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
-    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
-    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"hive-site\",\n" +
-    "          \"tag\" : \"version1502130841736\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
-    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
-    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
-    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
-    "            \"hive.server2.authentication\" : \"NONE\",\n" +
-    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
-    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
-    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
-    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
-    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
-    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
-    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
-    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
-    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
-    "            \"hive.server2.transport.mode\" : \"http\",\n" +
-    "            \"hive.server2.use.SSL\" : \"false\",\n" +
-    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : {\n" +
-    "            \"hidden\" : {\n" +
-    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
-    "            }\n" +
-    "          }\n" +
-    "        },\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"webhcat-site\",\n" +
-    "          \"tag\" : \"version1502131111746\",\n" +
-    "          \"version\" : 2,\n" +
-    "          \"properties\" : {\n" +
-    "            \"templeton.port\" : \"50111\",\n" +
-    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
-    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
-    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502131110745,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
-    "      \"service_name\" : \"HIVE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"oozie-site\",\n" +
-    "          \"tag\" : \"version1502131137103\",\n" +
-    "          \"version\" : 3,\n" +
-    "          \"properties\" : {\n" +
-    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 3,\n" +
-    "      \"service_name\" : \"OOZIE\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"tez-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"createtime\" : 1502122253525,\n" +
-    "      \"group_id\" : -1,\n" +
-    "      \"group_name\" : \"Default\",\n" +
-    "      \"hosts\" : [ ],\n" +
-    "      \"is_cluster_compatible\" : true,\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
-    "      \"service_name\" : \"TEZ\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    },\n" +
-    "    {\n" +
-    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
-    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "      \"configurations\" : [\n" +
-    "        {\n" +
-    "          \"Config\" : {\n" +
-    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
-    "            \"stack_id\" : \"HDP-2.6\"\n" +
-    "          },\n" +
-    "          \"type\" : \"yarn-site\",\n" +
-    "          \"tag\" : \"version1\",\n" +
-    "          \"version\" : 1,\n" +
-    "          \"properties\" : {\n" +
-    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
-    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
-    "            \"yarn.acl.enable\" : \"false\",\n" +
-    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
-    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
-    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
-    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
-    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
-    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
-    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
-    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
-    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
-    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
-    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
-    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
-    "          },\n" +
-    "          \"properties_attributes\" : { }\n" +
-    "        }\n" +
-    "      ],\n" +
-    "      \"is_current\" : true,\n" +
-    "      \"service_config_version\" : 1,\n" +
-    "      \"service_name\" : \"YARN\",\n" +
-    "      \"stack_id\" : \"HDP-2.6\",\n" +
-    "      \"user\" : \"admin\"\n" +
-    "    }\n" +
-    "  ]\n" +
-    "}";
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/descriptors/README b/b/gateway-release/home/conf/descriptors/README
deleted file mode 100644
index a2e5226..0000000
--- a/b/gateway-release/home/conf/descriptors/README
+++ /dev/null
@@ -1 +0,0 @@
-THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/shared-providers/README b/b/gateway-release/home/conf/shared-providers/README
deleted file mode 100644
index 44d12a3..0000000
--- a/b/gateway-release/home/conf/shared-providers/README
+++ /dev/null
@@ -1 +0,0 @@
-THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file


[24/37] knox git commit: KNOX-1064 - Externalize Hadoop Service Configuration Details and Service URL Creation (Phil Zampino via Sandeep More)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
new file mode 100644
index 0000000..dd35dbb
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
@@ -0,0 +1,876 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+
+public class AmbariDynamicServiceURLCreatorTest {
+
+    @Test
+    public void testHiveURLFromInternalMapping() throws Exception {
+        testHiveURL(null);
+    }
+
+    @Test
+    public void testHiveURLFromExternalMapping() throws Exception {
+        testHiveURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testHiveURL(Object mappingConfiguration) throws Exception {
+
+        final String   SERVICE_NAME = "HIVE";
+        final String[] HOSTNAMES    = {"host3", "host2", "host4"};
+        final String   HTTP_PATH    = "cliservice";
+        final String   HTTP_PORT    = "10001";
+        final String   BINARY_PORT  = "10000";
+
+        String expectedScheme = "http";
+
+        final List<String> hiveServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent hiveServer = EasyMock.createNiceMock(AmbariComponent.class);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(hiveServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Configure HTTP Transport
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+        EasyMock.replay(hiveServer);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        List<String> urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+
+        // Configure BINARY Transport
+        EasyMock.reset(hiveServer);
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn("").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.port")).andReturn(BINARY_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("binary").anyTimes();
+        EasyMock.replay(hiveServer);
+
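+        // Note: even with binary transport configured, this test still expects an HTTP-scheme URL on the HTTP port with an empty path.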
+        // Run the test
+        urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, "");
+
+        // Configure HTTPS Transport
+        EasyMock.reset(hiveServer);
+        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("true").anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+        EasyMock.replay(hiveServer);
+
+        // Run the test
+        expectedScheme = "https";
+        urls = builder.create(SERVICE_NAME);
+        assertEquals(HOSTNAMES.length, urls.size());
+        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+    }
+
+    @Test
+    public void testResourceManagerURLFromInternalMapping() throws Exception {
+        testResourceManagerURL(null);
+    }
+
+    @Test
+    public void testResourceManagerURLFromExternalMapping() throws Exception {
+        testResourceManagerURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testResourceManagerURL(Object mappingConfiguration) throws Exception {
+
+        final String HTTP_ADDRESS  = "host2:1111";
+        final String HTTPS_ADDRESS = "host2:22222";
+
+        // HTTP
+        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTP");
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("RESOURCEMANAGER").get(0);
+        assertEquals("http://" + HTTP_ADDRESS + "/ws", url);
+
+        // HTTPS
+        EasyMock.reset(resman);
+        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTPS_ONLY");
+
+        // Run the test
+        url = builder.create("RESOURCEMANAGER").get(0);
+        assertEquals("https://" + HTTPS_ADDRESS + "/ws", url);
+    }
+
+    private void setResourceManagerComponentExpectations(final AmbariComponent resmanMock,
+                                                         final String          httpAddress,
+                                                         final String          httpsAddress,
+                                                         final String          httpPolicy) {
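+        // The yarn.http.policy value determines whether the URL creator uses the HTTP or the HTTPS webapp address.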
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.address")).andReturn(httpAddress).anyTimes();
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.https.address")).andReturn(httpsAddress).anyTimes();
+        EasyMock.expect(resmanMock.getConfigProperty("yarn.http.policy")).andReturn(httpPolicy).anyTimes();
+        EasyMock.replay(resmanMock);
+    }
+
+    @Test
+    public void testJobTrackerURLFromInternalMapping() throws Exception {
+        testJobTrackerURL(null);
+    }
+
+    @Test
+    public void testJobTrackerURLFromExternalMapping() throws Exception {
+        testJobTrackerURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testJobTrackerURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host2:5678";
+
+        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(resman.getConfigProperty("yarn.resourcemanager.address")).andReturn(ADDRESS).anyTimes();
+        EasyMock.replay(resman);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("JOBTRACKER").get(0);
+        assertEquals("rpc://" + ADDRESS, url);
+    }
+
+    @Test
+    public void testNameNodeURLFromInternalMapping() throws Exception {
+        testNameNodeURL(null);
+    }
+
+    @Test
+    public void testNameNodeURLFromExternalMapping() throws Exception {
+        testNameNodeURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testNameNodeURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host1:1234";
+
+        AmbariComponent namenode = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(namenode.getConfigProperty("dfs.namenode.rpc-address")).andReturn(ADDRESS).anyTimes();
+        EasyMock.replay(namenode);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("NAMENODE").get(0);
+        assertEquals("hdfs://" + ADDRESS, url);
+    }
+
+    @Test
+    public void testWebHCatURLFromInternalMapping() throws Exception {
+        testWebHCatURL(null);
+    }
+
+    @Test
+    public void testWebHCatURLFromExternalMapping() throws Exception {
+        testWebHCatURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testWebHCatURL(Object mappingConfiguration) throws Exception {
+
+        final String HOSTNAME = "host3";
+        final String PORT     = "1919";
+
+        AmbariComponent webhcatServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(webhcatServer.getConfigProperty("templeton.port")).andReturn(PORT).anyTimes();
+        List<String> webHcatServerHosts = Collections.singletonList(HOSTNAME);
+        EasyMock.expect(webhcatServer.getHostNames()).andReturn(webHcatServerHosts).anyTimes();
+        EasyMock.replay(webhcatServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("WEBHCAT_SERVER")).andReturn(webhcatServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("WEBHCAT").get(0);
+        assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
+    }
+
+    @Test
+    public void testOozieURLFromInternalMapping() throws Exception {
+        testOozieURL(null);
+    }
+
+    @Test
+    public void testOozieURLFromExternalMapping() throws Exception {
+        testOozieURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testOozieURL(Object mappingConfiguration) throws Exception {
+        final String URL = "http://host3:2222";
+
+        AmbariComponent oozieServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(oozieServer.getConfigProperty("oozie.base.url")).andReturn(URL).anyTimes();
+        EasyMock.replay(oozieServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("OOZIE_SERVER")).andReturn(oozieServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        String url = builder.create("OOZIE").get(0);
+        assertEquals(URL, url);
+    }
+
+    @Test
+    public void testWebHBaseURLFromInternalMapping() throws Exception {
+        testWebHBaseURL(null);
+    }
+
+    @Test
+    public void testWebHBaseURLFromExternalMapping() throws Exception {
+        testWebHBaseURL(TEST_MAPPING_CONFIG);
+    }
+
+    private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
+        final String[] HOSTNAMES = {"host2", "host4"};
+
+        AmbariComponent hbaseMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
+        EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
+        EasyMock.replay(hbaseMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("HBASE_MASTER")).andReturn(hbaseMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+        List<String> urls = builder.create("WEBHBASE");
+        validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
+    }
+
+    @Test
+    public void testWebHdfsURLFromInternalMapping() throws Exception {
+        testWebHdfsURL(null);
+    }
+
+    @Test
+    public void testWebHdfsURLFromExternalMapping() throws Exception {
+        testWebHdfsURL(TEST_MAPPING_CONFIG);
+    }
+
+    @Test
+    public void testWebHdfsURLFromSystemPropertyOverride() throws Exception {
+        // Write the test mapping configuration to a temp file
+        File mappingFile = File.createTempFile("mapping-config", ".xml");
+        FileUtils.write(mappingFile, OVERRIDE_MAPPING_FILE_CONTENTS, "utf-8");
+
+        // Set the system property to point to the temp file
+        System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
+                           mappingFile.getAbsolutePath());
+        try {
+            final String ADDRESS = "host3:1357";
+            // The URL creator should apply the file contents, and create the URL accordingly
+            String url = getTestWebHdfsURL(ADDRESS, null);
+
+            // Verify the URL matches the pattern from the file
+            assertEquals("http://" + ADDRESS + "/webhdfs/OVERRIDE", url);
+        } finally {
+            // Reset the system property, and delete the temp file
+            System.clearProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY);
+            mappingFile.delete();
+        }
+    }
+
+    private void testWebHdfsURL(Object mappingConfiguration) throws Exception {
+        final String ADDRESS = "host3:1357";
+        assertEquals("http://" + ADDRESS + "/webhdfs", getTestWebHdfsURL(ADDRESS, mappingConfiguration));
+    }
+
+
+    private String getTestWebHdfsURL(String address, Object mappingConfiguration) throws Exception {
+        AmbariCluster.ServiceConfiguration hdfsSC = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+        Map<String, String> hdfsProps = new HashMap<>();
+        hdfsProps.put("dfs.namenode.http-address", address);
+        EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+        EasyMock.replay(hdfsSC);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site")).andReturn(hdfsSC).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Create the URL
+        AmbariDynamicServiceURLCreator creator = newURLCreator(cluster, mappingConfiguration);
+        return creator.create("WEBHDFS").get(0);
+    }
+
+
+    @Test
+    public void testAtlasApiURL() throws Exception {
+        final String ATLAS_REST_ADDRESS = "http://host2:21000";
+
+        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.rest.address")).andReturn(ATLAS_REST_ADDRESS).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("ATLAS-API");
+        assertEquals(1, urls.size());
+        assertEquals(ATLAS_REST_ADDRESS, urls.get(0));
+    }
+
+
+    @Test
+    public void testAtlasURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlasServerHosts).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("false").anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("ATLAS");
+        validateServiceURLs(urls, HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(atlasServer);
+        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlasServerHosts).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("true").anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(atlasServer);
+
+        // Run the test
+        urls = builder.create("ATLAS");
+        validateServiceURLs(urls, HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinUiURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "http", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "https", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testZeppelinWsURL() throws Exception {
+        final String HTTP_PORT = "8787";
+        final String HTTPS_PORT = "8989";
+
+        final String[] HOSTNAMES = {"host1", "host4"};
+        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+        EasyMock.replay(cluster);
+
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "ws", HTTP_PORT, null);
+
+        EasyMock.reset(zeppelinMaster);
+        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(zeppelinServerHosts).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+        EasyMock.replay(zeppelinMaster);
+
+        // Run the test
+        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "wss", HTTPS_PORT, null);
+    }
+
+
+    @Test
+    public void testDruidCoordinatorURL() throws Exception {
+        final String PORT = "8787";
+
+        final String[] HOSTNAMES = {"host3", "host2"};
+        final List<String> druidCoordinatorHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidCoordinator = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidCoordinator.getHostNames()).andReturn(druidCoordinatorHosts).anyTimes();
+        EasyMock.expect(druidCoordinator.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidCoordinator);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_COORDINATOR")).andReturn(druidCoordinator).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-COORDINATOR");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidBrokerURL() throws Exception {
+        final String PORT = "8181";
+
+        final String[] HOSTNAMES = {"host4", "host3"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidBroker = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidBroker.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidBroker.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidBroker);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(druidBroker).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-BROKER");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidRouterURL() throws Exception {
+        final String PORT = "8282";
+
+        final String[] HOSTNAMES = {"host5", "host7"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidRouter = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidRouter.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidRouter.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidRouter);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_ROUTER")).andReturn(druidRouter).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-ROUTER");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidOverlordURL() throws Exception {
+        final String PORT = "8383";
+
+        final String[] HOSTNAMES = {"host4", "host1"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidOverlord = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidOverlord.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidOverlord.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidOverlord);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_OVERLORD")).andReturn(druidOverlord).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-OVERLORD");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testDruidSupersetURL() throws Exception {
+        final String PORT = "8484";
+
+        final String[] HOSTNAMES = {"host4", "host1"};
+        final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+        AmbariComponent druidSuperset = EasyMock.createNiceMock(AmbariComponent.class);
+        EasyMock.expect(druidSuperset.getHostNames()).andReturn(druidHosts).anyTimes();
+        EasyMock.expect(druidSuperset.getConfigProperty("SUPERSET_WEBSERVER_PORT")).andReturn(PORT).anyTimes();
+        EasyMock.replay(druidSuperset);
+
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_SUPERSET")).andReturn(druidSuperset).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("SUPERSET");
+        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+    }
+
+
+    @Test
+    public void testMissingServiceComponentURL() throws Exception {
+        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();
+        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(null).anyTimes();
+        EasyMock.replay(cluster);
+
+        // Run the test
+        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+        List<String> urls = builder.create("DRUID-BROKER");
+        assertNotNull(urls);
+        assertEquals(1, urls.size());
+        assertEquals("http://{HOST}:{PORT}", urls.get(0));
+
+        urls = builder.create("HIVE");
+        assertNotNull(urls);
+        assertEquals(1, urls.size());
+        assertEquals("http://{HOST}:{PORT}/{PATH}", urls.get(0));
+    }
+
+
+    /**
+     * Convenience method for creating AmbariDynamicServiceURLCreator instances from different mapping configuration
+     * input sources.
+     *
+     * @param cluster       The Ambari ServiceDiscovery Cluster model
+     * @param mappingConfig The mapping configuration, or null if the internal config should be used.
+     *
+     * @return An AmbariDynamicServiceURLCreator instance, capable of creating service URLs based on the specified
+     *         cluster's configuration details.
+     */
+    private static AmbariDynamicServiceURLCreator newURLCreator(AmbariCluster cluster, Object mappingConfig) throws Exception {
+        AmbariDynamicServiceURLCreator result = null;
+
+        if (mappingConfig == null) {
+            result = new AmbariDynamicServiceURLCreator(cluster);
+        } else {
+            if (mappingConfig instanceof String) {
+                result = new AmbariDynamicServiceURLCreator(cluster, (String) mappingConfig);
+            } else if (mappingConfig instanceof File) {
+                result = new AmbariDynamicServiceURLCreator(cluster, (File) mappingConfig);
+            }
+        }
+
+        return result;
+    }
+
+
+    /**
+     * Validate the specified service URLs.
+     *
+     * @param urlsToValidate The URLs to validate
+     * @param hostNames      The host names expected in the test URLs
+     * @param scheme         The expected scheme for the URLs
+     * @param port           The expected port for the URLs
+     * @param path           The expected path for the URLs
+     */
+    private static void validateServiceURLs(List<String> urlsToValidate,
+                                            String[]     hostNames,
+                                            String       scheme,
+                                            String       port,
+                                            String       path) throws MalformedURLException {
+
+        List<String> hostNamesToTest = new LinkedList<>(Arrays.asList(hostNames));
+        for (String url : urlsToValidate) {
+            URI test = null;
+            try {
+                // Make sure it's a valid URL
+                test = new URI(url);
+            } catch (URISyntaxException e) {
+                fail(e.getMessage());
+            }
+
+            // Validate the scheme
+            assertEquals(scheme, test.getScheme());
+
+            // Validate the port
+            assertEquals(port, String.valueOf(test.getPort()));
+
+            // If the expected path is not specified, don't validate it
+            if (path != null) {
+                assertEquals("/" + path, test.getPath());
+            }
+
+            // Validate the host name
+            assertTrue(hostNamesToTest.contains(test.getHost()));
+            hostNamesToTest.remove(test.getHost());
+        }
+        assertTrue(hostNamesToTest.isEmpty());
+    }
+
+
+    private static final String TEST_MAPPING_CONFIG =
+            "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+            "<service-discovery-url-mappings>\n" +
+            "  <service name=\"NAMENODE\">\n" +
+            "    <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"DFS_NAMENODE_RPC_ADDRESS\">\n" +
+            "        <component>NAMENODE</component>\n" +
+            "        <config-property>dfs.namenode.rpc-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"JOBTRACKER\">\n" +
+            "    <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"YARN_RM_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHDFS\">\n" +
+            "    <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBHDFS_ADDRESS\">\n" +
+            "        <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
+            "        <config-property>dfs.namenode.http-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHCAT\">\n" +
+            "    <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>WEBHCAT_SERVER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "      <property name=\"PORT\">\n" +
+            "        <component>WEBHCAT_SERVER</component>\n" +
+            "        <config-property>templeton.port</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"OOZIE\">\n" +
+            "    <url-pattern>{OOZIE_ADDRESS}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"OOZIE_ADDRESS\">\n" +
+            "        <component>OOZIE_SERVER</component>\n" +
+            "        <config-property>oozie.base.url</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "\n" +
+            "  <service name=\"WEBHBASE\">\n" +
+            "    <url-pattern>http://{HOST}:60080</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>HBASE_MASTER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "  <service name=\"RESOURCEMANAGER\">\n" +
+            "    <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBAPP_HTTP_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.webapp.address</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"WEBAPP_HTTPS_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.resourcemanager.webapp.https.address</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"HTTP_POLICY\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>yarn.http.policy</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"SCHEME\">\n" +
+            "        <config-property>\n" +
+            "          <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+            "            <then>https</then>\n" +
+            "            <else>http</else>\n" +
+            "          </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"WEBAPP_ADDRESS\">\n" +
+            "        <component>RESOURCEMANAGER</component>\n" +
+            "        <config-property>\n" +
+            "          <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+            "            <then>WEBAPP_HTTPS_ADDRESS</then>\n" +
+            "            <else>WEBAPP_HTTP_ADDRESS</else>\n" +
+            "          </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "  <service name=\"HIVE\">\n" +
+            "    <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"HOST\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <hostname/>\n" +
+            "      </property>\n" +
+            "      <property name=\"USE_SSL\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.use.SSL</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"PATH\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.thrift.http.path</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"PORT\">\n" +
+            "        <component>HIVE_SERVER</component>\n" +
+            "        <config-property>hive.server2.thrift.http.port</config-property>\n" +
+            "      </property>\n" +
+            "      <property name=\"SCHEME\">\n" +
+            "        <config-property>\n" +
+            "            <if property=\"USE_SSL\" value=\"true\">\n" +
+            "                <then>https</then>\n" +
+            "                <else>http</else>\n" +
+            "            </if>\n" +
+            "        </config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "</service-discovery-url-mappings>\n";
+
+
+    private static final String OVERRIDE_MAPPING_FILE_CONTENTS =
+            "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+            "<service-discovery-url-mappings>\n" +
+            "  <service name=\"WEBHDFS\">\n" +
+            "    <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs/OVERRIDE</url-pattern>\n" +
+            "    <properties>\n" +
+            "      <property name=\"WEBHDFS_ADDRESS\">\n" +
+            "        <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
+            "        <config-property>dfs.namenode.http-address</config-property>\n" +
+            "      </property>\n" +
+            "    </properties>\n" +
+            "  </service>\n" +
+            "</service-discovery-url-mappings>\n";
+
+}

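The fallback assertions in testMissingServiceComponentURL above mean a caller can receive the raw "http://{HOST}:{PORT}" template whenever the backing component is absent. Below is a minimal standalone sketch (hypothetical class, not part of this commit) of how a consumer might screen out such unresolved templates; the SimpleDescriptorHandler change later in this mail takes the stricter route of full URI validation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class UnresolvedUrlFilterSketch {
    public static void main(String[] args) {
        // URLs as a URL creator might return them: one resolved, one still a
        // template because the component was missing (see testMissingServiceComponentURL).
        List<String> discovered = Arrays.asList(
                "http://host4:8383",
                "http://{HOST}:{PORT}");

        List<String> usable = new ArrayList<>();
        for (String url : discovered) {
            if (!url.contains("{")) {   // unresolved placeholder => skip
                usable.add(url);
            }
        }
        System.out.println(usable);     // prints [http://host4:8383]
    }
}
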
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index 1e5e7b2..f7f0553 100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -27,7 +27,9 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index fb563fa..521b5b4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -16,15 +16,28 @@
  */
 package org.apache.hadoop.gateway.topology.simple;
 
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.InputStreamReader;
+import java.io.IOException;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.services.Service;
 import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
 import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
 
-import java.io.*;
-import java.util.*;
-
 
 /**
  * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
@@ -78,19 +91,29 @@ public class SimpleDescriptorHandler {
                     descServiceURLs = cluster.getServiceURLs(serviceName);
                 }
 
-                // If there is at least one URL associated with the service, then add it to the map
+                // Validate the discovered service URLs
+                List<String> validURLs = new ArrayList<>();
                 if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
-                    serviceURLs.put(serviceName, descServiceURLs);
+                    // Validate the URL(s)
+                    for (String descServiceURL : descServiceURLs) {
+                        if (validateURL(serviceName, descServiceURL)) {
+                            validURLs.add(descServiceURL);
+                        }
+                    }
+                }
+
+                // If there is at least one valid URL associated with the service, then add it to the map
+                if (!validURLs.isEmpty()) {
+                    serviceURLs.put(serviceName, validURLs);
                 } else {
                     log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
-                    throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
-                                                    ". Topology update aborted!");
                 }
             }
         } else {
             log.failedToDiscoverClusterServices(desc.getClusterName());
         }
 
+        BufferedWriter fw = null;
         topologyDescriptor = null;
         File providerConfig = null;
         try {
@@ -110,7 +133,7 @@ public class SimpleDescriptorHandler {
                 topologyFilename = desc.getClusterName();
             }
             topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
-            FileWriter fw = new FileWriter(topologyDescriptor);
+            fw = new BufferedWriter(new FileWriter(topologyDescriptor));
 
             fw.write("<topology>\n");
 
@@ -123,8 +146,12 @@ public class SimpleDescriptorHandler {
             }
             policyReader.close();
 
+            // Sort the service names to write the services alphabetically
+            List<String> serviceNames = new ArrayList<>(serviceURLs.keySet());
+            Collections.sort(serviceNames);
+
             // Write the service declarations
-            for (String serviceName : serviceURLs.keySet()) {
+            for (String serviceName : serviceNames) {
                 fw.write("    <service>\n");
                 fw.write("        <role>" + serviceName + "</role>\n");
                 for (String url : serviceURLs.get(serviceName)) {
@@ -136,16 +163,37 @@ public class SimpleDescriptorHandler {
             fw.write("</topology>\n");
 
             fw.flush();
-            fw.close();
         } catch (IOException e) {
             log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
             topologyDescriptor.delete();
+        } finally {
+            if (fw != null) {
+                try {
+                    fw.close();
+                } catch (IOException e) {
+                    // ignore
+                }
+            }
         }
 
         result.put("topology", topologyDescriptor);
         return result;
     }
 
+    private static boolean validateURL(String serviceName, String url) {
+        boolean result = false;
+
+        if (url != null && !url.isEmpty()) {
+            try {
+                new URI(url);
+                result = true;
+            } catch (URISyntaxException e) {
+                log.serviceURLValidationFailed(serviceName, url, e);
+            }
+        }
+
+        return result;
+    }
 
     private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
         File providerConfig;

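A minimal sketch (hypothetical class, mirroring the idea of validateURL() above) of why an unresolved template such as "{SCHEME}://localhost:10000/" is rejected: '{' and '}' are illegal URI characters, so new URI(...) throws.

import java.net.URI;
import java.net.URISyntaxException;

public class UrlValidationSketch {
    public static void main(String[] args) {
        String[] candidates = {
                "http://localhost:9999/thiswillwork",   // well-formed, would be kept
                "{SCHEME}://localhost:10000/"           // unresolved template, dropped
        };
        for (String url : candidates) {
            try {
                new URI(url);                           // same check as validateURL()
                System.out.println("valid:   " + url);
            } catch (URISyntaxException e) {
                System.out.println("invalid: " + url + " (" + e.getReason() + ")");
            }
        }
    }
}
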
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
index cf9aa28..2a2c4c1 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -29,7 +29,7 @@ public interface SimpleDescriptorMessages {
     void failedToDiscoverClusterServices(final String cluster);
 
     @Message(level = MessageLevel.ERROR,
-            text = "No URLs were discovered for {0} in the {1} cluster.")
+            text = "No valid URLs were discovered for {0} in the {1} cluster.")
     void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
 
     @Message(level = MessageLevel.ERROR,
@@ -37,6 +37,12 @@ public interface SimpleDescriptorMessages {
     void failedToResolveProviderConfigRef(final String providerConfigRef);
 
     @Message(level = MessageLevel.ERROR,
+            text = "URL validation failed for {0} URL {1} : {2}")
+    void serviceURLValidationFailed(final String serviceName,
+                                    final String url,
+                                    @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+    @Message(level = MessageLevel.ERROR,
             text = "Error generating topology {0} from simple descriptor: {1}")
     void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
                                                       @StackTrace( level = MessageLevel.DEBUG ) Exception e );

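For context, a sketch of a call site for these messages. That MessagesFactory.get(...) is the binding entry point is an assumption based on the import in SimpleDescriptorHandler.java above; the class and exception argument are fabricated for illustration, and SimpleDescriptorMessages is assumed to be on the classpath in the same package.

import java.net.URISyntaxException;

import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

public class MessagesSketch {
    // Binding assumed from the MessagesFactory import in SimpleDescriptorHandler above.
    private static final SimpleDescriptorMessages log =
            MessagesFactory.get(SimpleDescriptorMessages.class);

    public static void main(String[] args) {
        // Emits the ERROR-level text; the stack trace is logged at DEBUG level,
        // per the @StackTrace annotation on the interface method.
        log.serviceURLValidationFailed("HIVE",
                                       "{SCHEME}://localhost:10000/",
                                       new URISyntaxException("{SCHEME}://localhost:10000/",
                                                              "Illegal character in scheme"));
    }
}
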
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index 90c7146..f79ef23 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -17,6 +17,23 @@
  */
 package org.apache.hadoop.gateway.topology.simple;
 
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.util.XmlUtils;
 import org.easymock.EasyMock;
@@ -26,91 +43,89 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathFactory;
-import java.io.*;
-import java.util.*;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 
 public class SimpleDescriptorHandlerTest {
 
     private static final String TEST_PROVIDER_CONFIG =
-            "    <gateway>\n" +
-                    "        <provider>\n" +
-                    "            <role>authentication</role>\n" +
-                    "            <name>ShiroProvider</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            <param>\n" +
-                    "                <!-- \n" +
-                    "                session timeout in minutes,  this is really idle timeout,\n" +
-                    "                defaults to 30mins, if the property value is not defined,, \n" +
-                    "                current client authentication would expire if client idles contiuosly for more than this value\n" +
-                    "                -->\n" +
-                    "                <name>sessionTimeout</name>\n" +
-                    "                <value>30</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapContextFactory</name>\n" +
-                    "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory</name>\n" +
-                    "                <value>$ldapContextFactory</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
-                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
-                    "                <value>ldap://localhost:33389</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
-                    "                <value>simple</value>\n" +
-                    "            </param>\n" +
-                    "            <param>\n" +
-                    "                <name>urls./**</name>\n" +
-                    "                <value>authcBasic</value>\n" +
-                    "            </param>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <provider>\n" +
-                    "            <role>identity-assertion</role>\n" +
-                    "            <name>Default</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "        </provider>\n" +
-                    "\n" +
-                    "        <!--\n" +
-                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
-                    "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
-                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
-                    "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
-                    "\n" +
-                    "        If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
-                    "        enabled parameter as false.\n" +
-                    "\n" +
-                    "        The name parameter specifies the external host names in a comma separated list.\n" +
-                    "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
-                    "\n" +
-                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
-                    "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
-                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
-                    "        -->\n" +
-                    "        <provider>\n" +
-                    "            <role>hostmap</role>\n" +
-                    "            <name>static</name>\n" +
-                    "            <enabled>true</enabled>\n" +
-                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
-                    "        </provider>\n" +
-                    "    </gateway>\n";
+        "    <gateway>\n" +
+        "        <provider>\n" +
+        "            <role>authentication</role>\n" +
+        "            <name>ShiroProvider</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "            <param>\n" +
+        "                <!-- \n" +
+        "                session timeout in minutes,  this is really idle timeout,\n" +
+        "                defaults to 30mins, if the property value is not defined,, \n" +
+        "                current client authentication would expire if client idles contiuosly for more than this value\n" +
+        "                -->\n" +
+        "                <name>sessionTimeout</name>\n" +
+        "                <value>30</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm</name>\n" +
+        "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapContextFactory</name>\n" +
+        "                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.contextFactory</name>\n" +
+        "                <value>$ldapContextFactory</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.userDnTemplate</name>\n" +
+        "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.contextFactory.url</name>\n" +
+        "                <value>ldap://localhost:33389</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+        "                <value>simple</value>\n" +
+        "            </param>\n" +
+        "            <param>\n" +
+        "                <name>urls./**</name>\n" +
+        "                <value>authcBasic</value>\n" +
+        "            </param>\n" +
+        "        </provider>\n" +
+        "\n" +
+        "        <provider>\n" +
+        "            <role>identity-assertion</role>\n" +
+        "            <name>Default</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "        </provider>\n" +
+        "\n" +
+        "        <!--\n" +
+        "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+        "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+        "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
+        "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+        "\n" +
+        "        If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+        "        enabled parameter as false.\n" +
+        "\n" +
+        "        The name parameter specifies the external host names in a comma separated list.\n" +
+        "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+        "\n" +
+        "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+        "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+        "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
+        "        -->\n" +
+        "        <provider>\n" +
+        "            <role>hostmap</role>\n" +
+        "            <name>static</name>\n" +
+        "            <enabled>true</enabled>\n" +
+        "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+        "        </provider>\n" +
+        "    </gateway>\n";
 
 
     /**
@@ -134,7 +149,7 @@ public class SimpleDescriptorHandlerTest {
         serviceURLs.put("WEBHBASE", null);
         serviceURLs.put("HIVE", null);
         serviceURLs.put("RESOURCEMANAGER", null);
-        serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+        serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 
         // Write the externalized provider config to a temp file
         File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
@@ -225,14 +240,152 @@ public class SimpleDescriptorHandlerTest {
     }
 
 
-    private File writeProviderConfig(String path, String content) throws IOException {
-        File f = new File(path);
+    /**
+     * KNOX-1006
+     *
+     * Verify the behavior of the SimpleDescriptorHandler when service discovery fails to produce a valid URL for
+     * a service.
+     *
+     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
+     *             org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
+     */
+    @Test
+    public void testInvalidServiceURLFromDiscovery() throws Exception {
+        final String CLUSTER_NAME = "myproperties";
+
+        // Configure the PropertiesFile Service Discovery implementation for this test
+        final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
+        Properties serviceDiscoverySourceProps = new Properties();
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
+                                                DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
+                                                DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT",         DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE",           DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE",        DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE",            "{SCHEME}://localhost:10000/");
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
+        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI",        DEFAULT_VALID_SERVICE_URL);
+        File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
+        serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
+                                          "Test Service Discovery Source");
+
+        // Prepare a mock SimpleDescriptor
+        final String type = "PROPERTIES_FILE";
+        final String address = serviceDiscoverySource.getAbsolutePath();
+        final Map<String, List<String>> serviceURLs = new HashMap<>();
+        serviceURLs.put("NAMENODE", null);
+        serviceURLs.put("JOBTRACKER", null);
+        serviceURLs.put("WEBHDFS", null);
+        serviceURLs.put("WEBHCAT", null);
+        serviceURLs.put("OOZIE", null);
+        serviceURLs.put("WEBHBASE", null);
+        serviceURLs.put("HIVE", null);
+        serviceURLs.put("RESOURCEMANAGER", null);
+        serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 
-        Writer fw = new FileWriter(f);
-        fw.write(content);
-        fw.flush();
-        fw.close();
+        // Write the externalized provider config to a temp file
+        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+        File topologyFile = null;
+        try {
+            File destDir = (new File(".")).getCanonicalFile();
+
+            // Mock out the simple descriptor
+            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+            EasyMock.expect(testDescriptor.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();
+            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+            for (String serviceName : serviceURLs.keySet()) {
+                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+                EasyMock.replay(svc);
+                serviceMocks.add(svc);
+            }
+            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+            EasyMock.replay(testDescriptor);
+
+            // Invoke the simple descriptor handler
+            Map<String, File> files =
+                    SimpleDescriptorHandler.handle(testDescriptor,
+                                                   providerConfig.getParentFile(), // simple desc co-located with provider config
+                                                   destDir);
+
+            topologyFile = files.get("topology");
 
+            // Validate the resulting topology descriptor
+            assertTrue(topologyFile.exists());
+
+            // Validate the topology descriptor's correctness
+            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+            if( !validator.validateTopology() ){
+                throw new SAXException( validator.getErrorString() );
+            }
+
+            XPathFactory xPathfactory = XPathFactory.newInstance();
+            XPath xpath = xPathfactory.newXPath();
+
+            // Parse the topology descriptor
+            Document topologyXml = XmlUtils.readXml(topologyFile);
+
+            // Validate the provider configuration
+            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+            assertTrue("Resulting provider config should be identical to the referenced content.",
+                    extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+            // Validate the service declarations
+            List<String> topologyServices = new ArrayList<>();
+            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+            NodeList serviceNodes =
+                    (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+                Node serviceNode = serviceNodes.item(serviceNodeIndex);
+                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+                assertNotNull(roleNode);
+                String role = roleNode.getNodeValue();
+                topologyServices.add(role);
+                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+                    Node urlNode = urlNodes.item(urlNodeIndex);
+                    assertNotNull(urlNode);
+                    String url = urlNode.getNodeValue();
+                    assertNotNull("Every declared service should have a URL.", url);
+                    if (!topologyServiceURLs.containsKey(role)) {
+                        topologyServiceURLs.put(role, new ArrayList<String>());
+                    }
+                    topologyServiceURLs.get(role).add(url);
+                }
+            }
+
+            // There should not be a service element for HIVE, since it had no valid URLs
+            assertEquals("Unexpected number of service declarations.", serviceURLs.size() - 1, topologyServices.size());
+            assertFalse("The HIVE service should have been omitted from the generated topology.", topologyServices.contains("HIVE"));
+
+            assertEquals("Unexpected number of service URLs.", serviceURLs.size() - 1, topologyServiceURLs.size());
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            fail(e.getMessage());
+        } finally {
+            serviceDiscoverySource.delete();
+            providerConfig.delete();
+            if (topologyFile != null) {
+                topologyFile.delete();
+            }
+        }
+    }
+
+
+    private File writeProviderConfig(String path, String content) throws IOException {
+        File f = new File(path);
+        FileUtils.write(f, content);
         return f;
     }
 


[26/37] knox git commit: KNOX-895 - Pass Headers and Cookies to websocket backend

Posted by lm...@apache.org.
KNOX-895 - Pass Headers and Cookies to websocket backend


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2d236e78
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2d236e78
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2d236e78

Branch: refs/heads/KNOX-1049
Commit: 2d236e78b70ef7fb312ebf0fa198657595e2f4ba
Parents: 7b401de
Author: Sandeep More <mo...@apache.org>
Authored: Wed Oct 11 17:04:52 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Oct 11 17:04:52 2017 -0400

----------------------------------------------------------------------
 .../websockets/GatewayWebsocketHandler.java     |  41 +-
 .../gateway/websockets/ProxyInboundClient.java  | 107 ++++++
 .../websockets/ProxyWebSocketAdapter.java       |  20 +-
 .../websockets/ProxyInboundClientTest.java      | 374 +++++++++++++++++++
 4 files changed, 530 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
index 75a4a2b..0ee54fd 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
@@ -21,6 +21,8 @@ import java.io.File;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
+import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -40,11 +42,13 @@ import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse;
 import org.eclipse.jetty.websocket.servlet.WebSocketCreator;
 import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
 
+import javax.websocket.ClientEndpointConfig;
+
 /**
  * Websocket handler that will handle websocket connection request. This class
  * is responsible for creating a proxy socket for inbound and outbound
  * connections. This is also where the http to websocket handoff happens.
- * 
+ *
  * @since 0.10
  */
 public class GatewayWebsocketHandler extends WebSocketHandler
@@ -74,7 +78,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
 
   /**
    * Create an instance
-   * 
+   *
    * @param config
    * @param services
    */
@@ -90,7 +94,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see
    * org.eclipse.jetty.websocket.server.WebSocketHandler#configure(org.eclipse.
    * jetty.websocket.servlet.WebSocketServletFactory)
@@ -119,7 +123,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
 
   /*
    * (non-Javadoc)
-   * 
+   *
    * @see
    * org.eclipse.jetty.websocket.servlet.WebSocketCreator#createWebSocket(org.
    * eclipse.jetty.websocket.servlet.ServletUpgradeRequest,
@@ -137,7 +141,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
       final String backendURL = getMatchedBackendURL(path);
 
       /* Upgrade happens here */
-      return new ProxyWebSocketAdapter(URI.create(backendURL), pool);
+      return new ProxyWebSocketAdapter(URI.create(backendURL), pool, getClientEndpointConfig(req));
     } catch (final Exception e) {
       LOG.failedCreatingWebSocket(e);
       throw e;
@@ -145,11 +149,32 @@ public class GatewayWebsocketHandler extends WebSocketHandler
   }
 
   /**
+   * Returns a {@link ClientEndpointConfig} config that contains the headers
+   * to be passed to the backend.
+   * @since 0.14.0
+   * @param req the original browser upgrade request whose headers should be forwarded
+   * @return a {@link ClientEndpointConfig} that replays those headers on the outbound handshake
+   */
+  private ClientEndpointConfig getClientEndpointConfig(final ServletUpgradeRequest req) {
+
+    return ClientEndpointConfig.Builder.create().configurator( new ClientEndpointConfig.Configurator() {
+
+       @Override
+       public void beforeRequest(final Map<String, List<String>> headers) {
+
+         /* Add request headers */
+         req.getHeaders().forEach(headers::putIfAbsent);
+
+       }
+    }).build();
+  }
+
+  /**
    * This method looks at the context path and returns the backend websocket
    * url. If websocket url is found it is used as is, or we default to
    * ws://{host}:{port} which might or might not be right.
-   * 
-   * @param  The context path
+   *
+   * @param path The context path
    * @return Websocket backend url
    */
   private synchronized String getMatchedBackendURL(final String path) {
@@ -203,7 +228,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
         URI serviceUri = new URI(backendURL);
         backend.append(serviceUri);
         /* Avoid Zeppelin Regression - as this would require ambari changes and break current knox websocket use case*/
-        if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService[1] != null) {
+        if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService.length > 0 && pathService[1] != null) {
           backend.append(pathService[1]);
         }
       }

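A self-contained sketch (hypothetical header values, not part of this commit) of the forwarding pattern in getClientEndpointConfig() above; putIfAbsent is what keeps container-generated handshake headers intact.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.websocket.ClientEndpointConfig;

public class HeaderForwardingSketch {
    public static void main(String[] args) {
        // Stand-ins for headers captured from the browser's upgrade request.
        final Map<String, List<String>> browserHeaders = new HashMap<>();
        browserHeaders.put("Cookie", Arrays.asList("hadoop-jwt=abc123"));
        browserHeaders.put("Authorization", Arrays.asList("Basic Zm9vOmJhcg=="));

        ClientEndpointConfig config = ClientEndpointConfig.Builder.create()
            .configurator(new ClientEndpointConfig.Configurator() {
                @Override
                public void beforeRequest(final Map<String, List<String>> headers) {
                    // putIfAbsent leaves container-generated handshake headers intact.
                    browserHeaders.forEach(headers::putIfAbsent);
                }
            }).build();

        // The config would then be passed to
        // WebSocketContainer.connectToServer(endpoint, config, backendUri),
        // as ProxyWebSocketAdapter does later in this commit.
        System.out.println("configurator installed: " + config.getConfigurator());
    }
}
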
http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
new file mode 100644
index 0000000..4e938d2
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
@@ -0,0 +1,107 @@
+package org.apache.hadoop.gateway.websockets;
+
+import javax.websocket.CloseReason;
+import javax.websocket.Endpoint;
+import javax.websocket.EndpointConfig;
+import javax.websocket.MessageHandler;
+import javax.websocket.Session;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * A Websocket client with callback which is not annotation based.
+ * This handler accepts String and binary messages.
+ * @since 0.14.0
+ */
+public class ProxyInboundClient extends Endpoint {
+
+  /**
+   * Callback to be called once we have events on our socket.
+   */
+  private MessageEventCallback callback;
+
+  protected Session session;
+  protected EndpointConfig config;
+
+
+  public ProxyInboundClient(final MessageEventCallback callback) {
+    super();
+    this.callback = callback;
+  }
+
+  /**
+   * Developers must implement this method to be notified when a new
+   * conversation has just begun.
+   *
+   * @param backendSession the session that has just been activated.
+   * @param config  the configuration used to configure this endpoint.
+   */
+  @Override
+  public void onOpen(final javax.websocket.Session backendSession, final EndpointConfig config) {
+    this.session = backendSession;
+    this.config = config;
+
+    /* Set the max message size */
+    session.setMaxBinaryMessageBufferSize(Integer.MAX_VALUE);
+    session.setMaxTextMessageBufferSize(Integer.MAX_VALUE);
+
+    /* Add message handler for binary data */
+    session.addMessageHandler(new MessageHandler.Whole<byte[]>() {
+
+      /**
+       * Called when the message has been fully received.
+       *
+       * @param message the message data.
+       */
+      @Override
+      public void onMessage(final byte[] message) {
+        callback.onMessageBinary(message, true, session);
+      }
+
+    });
+
+    /* Add message handler for text data */
+    session.addMessageHandler(new MessageHandler.Whole<String>() {
+
+      /**
+       * Called when the message has been fully received.
+       *
+       * @param message the message data.
+       */
+      @Override
+      public void onMessage(final String message) {
+        callback.onMessageText(message, session);
+      }
+
+    });
+
+    callback.onConnectionOpen(backendSession);
+  }
+
+  @Override
+  public void onClose(final javax.websocket.Session backendSession, final CloseReason closeReason) {
+    callback.onConnectionClose(closeReason);
+    this.session = null;
+  }
+
+  @Override
+  public void onError(final javax.websocket.Session backendSession, final Throwable cause) {
+    callback.onError(cause);
+    this.session = null;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
index 1e7f583..4ea8d6c 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.gateway.websockets;
 import java.io.IOException;
 import java.net.URI;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
 
+import javax.websocket.ClientEndpointConfig;
 import javax.websocket.CloseReason;
 import javax.websocket.ContainerProvider;
 import javax.websocket.DeploymentException;
@@ -60,12 +60,23 @@ public class ProxyWebSocketAdapter extends WebSocketAdapter {
   private ExecutorService pool;
 
   /**
+   * Used to transmit headers from browser to backend server.
+   * @since 0.14
+   */
+  private ClientEndpointConfig clientConfig;
+
+  /**
    * Create an instance
    */
   public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool) {
+    this(backend, pool, null);
+  }
+
+  public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool, final ClientEndpointConfig clientConfig) {
     super();
     this.backend = backend;
     this.pool = pool;
+    this.clientConfig = clientConfig;
   }
 
   @Override
@@ -76,14 +87,15 @@ public class ProxyWebSocketAdapter extends WebSocketAdapter {
      * plumbing takes place
      */
     container = ContainerProvider.getWebSocketContainer();
-    final ProxyInboundSocket backendSocket = new ProxyInboundSocket(
-        getMessageCallback());
+
+    final ProxyInboundClient backendSocket = new ProxyInboundClient(getMessageCallback());
 
     /* build the configuration */
 
     /* Attempt Connect */
     try {
-      backendSession = container.connectToServer(backendSocket, backend);
+      backendSession = container.connectToServer(backendSocket, clientConfig, backend);
+
       LOG.onConnectionOpen(backend.toString());
 
     } catch (DeploymentException e) {

http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
new file mode 100644
index 0000000..69b45dd
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
@@ -0,0 +1,374 @@
+package org.apache.hadoop.gateway.websockets;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership.  The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.ContextHandler;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import javax.websocket.CloseReason;
+import javax.websocket.ContainerProvider;
+import javax.websocket.DeploymentException;
+import javax.websocket.Session;
+import javax.websocket.WebSocketContainer;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+/**
+ * Test {@link ProxyInboundClient} class.
+ * @since 0.14.0
+ */
+public class ProxyInboundClientTest {
+
+  private static Server server;
+  private static URI serverUri;
+  private static Handler handler;
+
+  String recievedMessage = null;
+
+  byte[] recievedBinaryMessage = null;
+
+
+  /* create an instance */
+  public ProxyInboundClientTest() {
+    super();
+  }
+
+  @BeforeClass
+  public static void startWSServer() throws Exception
+  {
+    server = new Server();
+    ServerConnector connector = new ServerConnector(server);
+    server.addConnector(connector);
+
+    handler = new WebsocketEchoHandler();
+
+    ContextHandler context = new ContextHandler();
+    context.setContextPath("/");
+    context.setHandler(handler);
+    server.setHandler(context);
+
+    server.start();
+
+    String host = connector.getHost();
+    if (host == null)
+    {
+      host = "localhost";
+    }
+    int port = connector.getLocalPort();
+    serverUri = new URI(String.format("ws://%s:%d/",host,port));
+  }
+
+  @AfterClass
+  public static void stopServer()
+  {
+    try
+    {
+      server.stop();
+    }
+    catch (Exception e)
+    {
+      e.printStackTrace(System.err);
+    }
+  }
+
+  @Test(timeout = 3000)
+  public void testClientInstance() throws IOException, DeploymentException {
+
+    final String textMessage = "Echo";
+
+    final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+    final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+    final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+      /**
+       * A generic callback, can be left un-implemented
+       *
+       * @param message
+       */
+      @Override
+      public void doCallback(String message) {
+
+      }
+
+      /**
+       * Callback when connection is established.
+       *
+       * @param session
+       */
+      @Override
+      public void onConnectionOpen(Object session) {
+
+      }
+
+      /**
+       * Callback when connection is closed.
+       *
+       * @param reason
+       */
+      @Override
+      public void onConnectionClose(CloseReason reason) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when there is an error in connection.
+       *
+       * @param cause
+       */
+      @Override
+      public void onError(Throwable cause) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a text message is received.
+       *
+       * @param message
+       * @param session
+       */
+      @Override
+      public void onMessageText(String message, Object session) {
+        recievedMessage = message;
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a binary message is received.
+       *
+       * @param message
+       * @param last
+       * @param session
+       */
+      @Override
+      public void onMessageBinary(byte[] message, boolean last,
+          Object session) {
+
+      }
+    } );
+
+    Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+    Session session = container.connectToServer(client, serverUri);
+
+    session.getBasicRemote().sendText(textMessage);
+
+    while(!isTestComplete.get()) {
+      /* just wait for the test to finish */
+    }
+
+    Assert.assertEquals("The received text message is not the same as the sent", textMessage, recievedMessage);
+  }
+
+  @Test(timeout = 3000)
+  public void testBinaryMessage() throws IOException, DeploymentException {
+
+    final String textMessage = "Echo";
+    final ByteBuffer binaryMessage = ByteBuffer.wrap(textMessage.getBytes());
+
+    final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+    final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+    final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+      /**
+       * A generic callback; it can be left unimplemented.
+       *
+       * @param message
+       */
+      @Override
+      public void doCallback(String message) {
+
+      }
+
+      /**
+       * Callback when connection is established.
+       *
+       * @param session
+       */
+      @Override
+      public void onConnectionOpen(Object session) {
+
+      }
+
+      /**
+       * Callback when connection is closed.
+       *
+       * @param reason
+       */
+      @Override
+      public void onConnectionClose(CloseReason reason) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when there is an error in connection.
+       *
+       * @param cause
+       */
+      @Override
+      public void onError(Throwable cause) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a text message is received.
+       *
+       * @param message
+       * @param session
+       */
+      @Override
+      public void onMessageText(String message, Object session) {
+        receivedMessage = message;
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a binary message is received.
+       *
+       * @param message
+       * @param last
+       * @param session
+       */
+      @Override
+      public void onMessageBinary(byte[] message, boolean last,
+          Object session) {
+        receivedBinaryMessage = message;
+        isTestComplete.set(true);
+      }
+    } );
+
+    Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+    Session session = container.connectToServer(client, serverUri);
+
+    session.getBasicRemote().sendBinary(binaryMessage);
+
+    while(!isTestComplete.get()) {
+      /* just wait for the test to finish */
+    }
+
+    Assert.assertEquals("Binary message does not match", textMessage, new String(recievedBinaryMessage));
+  }
+
+  @Test(timeout = 3000)
+  public void testTextMaxBufferLimit() throws IOException, DeploymentException {
+
+    final String longMessage = RandomStringUtils.random(100000);
+
+    final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+    final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+    final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+      /**
+       * A generic callback; it can be left unimplemented.
+       *
+       * @param message
+       */
+      @Override
+      public void doCallback(String message) {
+
+      }
+
+      /**
+       * Callback when connection is established.
+       *
+       * @param session
+       */
+      @Override
+      public void onConnectionOpen(Object session) {
+
+      }
+
+      /**
+       * Callback when connection is closed.
+       *
+       * @param reason
+       */
+      @Override
+      public void onConnectionClose(CloseReason reason) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when there is an error in connection.
+       *
+       * @param cause
+       */
+      @Override
+      public void onError(Throwable cause) {
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a text message is received.
+       *
+       * @param message
+       * @param session
+       */
+      @Override
+      public void onMessageText(String message, Object session) {
+        receivedMessage = message;
+        isTestComplete.set(true);
+      }
+
+      /**
+       * Callback when a binary message is received.
+       *
+       * @param message
+       * @param last
+       * @param session
+       */
+      @Override
+      public void onMessageBinary(byte[] message, boolean last,
+          Object session) {
+
+      }
+    } );
+
+    Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+    Session session = container.connectToServer(client, serverUri);
+
+    session.getBasicRemote().sendText(longMessage);
+
+    while(!isTestComplete.get()) {
+      /* just wait for the test to finish */
+    }
+
+    Assert.assertEquals(longMessage, receivedMessage);
+
+  }
+
+}
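
A side note on the wait loops above: spinning on an AtomicBoolean burns CPU and,
without a timeout, can hang forever. Below is a minimal sketch of an alternative
using java.util.concurrent.CountDownLatch. It is a suggestion only, not part of
this commit, and it reuses the names (session, container, client, textMessage,
receivedMessage) from testClientInstance above:

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    final CountDownLatch messageReceived = new CountDownLatch(1);

    // In MessageEventCallback#onMessageText, instead of setting a flag:
    //   receivedMessage = message;
    //   messageReceived.countDown();

    Session session = container.connectToServer(client, serverUri);
    session.getBasicRemote().sendText(textMessage);

    // Blocks without polling; fails fast if no echo arrives within 3 seconds.
    Assert.assertTrue("Timed out waiting for the echoed message",
        messageReceived.await(3, TimeUnit.SECONDS));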


[35/37] knox git commit: KNOX-1001 - Knox Shell Sqoop.Request class is Package Private (Andrei Viaryshka via lmccay)

Posted by lm...@apache.org.
KNOX-1001 - Knox Shell Sqoop.Request class is Package Private (Andrei Viaryshka via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c440344a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c440344a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c440344a

Branch: refs/heads/KNOX-1049
Commit: c440344ae3216d4f16c897d803cdf766bda2d97d
Parents: 0e13dc7
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 19 18:13:29 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 19 18:13:29 2017 -0400

----------------------------------------------------------------------
 .../src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/c440344a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
----------------------------------------------------------------------
diff --git a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
index 979b322..792ae94 100644
--- a/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
+++ b/gateway-shell/src/main/java/org/apache/hadoop/gateway/shell/job/Sqoop.java
@@ -34,7 +34,7 @@ import java.util.concurrent.Callable;
 
 public class Sqoop {
 
-  static class Request extends AbstractRequest<Response> {
+  public static class Request extends AbstractRequest<Response> {
 
     private String statusDir;
     List<NameValuePair> params = new ArrayList<NameValuePair>();
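
For illustration only, a minimal sketch of what this one-word visibility change
enables. The package and class below are hypothetical, not part of Knox; the
point is simply that code outside org.apache.hadoop.gateway.shell.job can now
refer to the Sqoop.Request type:

    package com.example.client;                 // hypothetical package

    import org.apache.hadoop.gateway.shell.job.Sqoop;

    public class SqoopRequestHolder {
      // This field would not compile before KNOX-1001, when
      // Sqoop.Request was package-private.
      private Sqoop.Request pendingRequest;
    }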


[32/37] knox git commit: KNOX-1021 - Should handle empty string for endpoint token service audience list

Posted by lm...@apache.org.
KNOX-1021 - Should handle empty string for endpoint token service audience list


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/22882317
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/22882317
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/22882317

Branch: refs/heads/KNOX-1049
Commit: 2288231780703f13b8f80da8e36787c4d494424c
Parents: bb467b8
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Thu Oct 19 11:05:33 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Oct 19 11:05:33 2017 +0100

----------------------------------------------------------------------
 .../jwt/filter/AbstractJWTFilter.java           |   3 +-
 .../federation/AbstractJWTFilterTest.java       | 105 ++++++++++++++++++-
 2 files changed, 105 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/22882317/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index 0d8ecb8..24069e3 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@ -40,6 +40,7 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.gateway.audit.api.Action;
 import org.apache.hadoop.gateway.audit.api.ActionOutcome;
 import org.apache.hadoop.gateway.audit.api.AuditContext;
@@ -129,7 +130,7 @@ public abstract class AbstractJWTFilter implements Filter {
   protected List<String> parseExpectedAudiences(String expectedAudiences) {
     List<String> audList = null;
     // setup the list of valid audiences for token validation
-    if (expectedAudiences != null) {
+    if (!StringUtils.isEmpty(expectedAudiences)) {
       // parse into the list
       String[] audArray = expectedAudiences.split(",");
       audList = new ArrayList<String>();
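
In effect, a null and an empty audience string now take the same branch. Below
is a simplified, standalone sketch of the parsing rule after this change; the
real method lives in AbstractJWTFilter and may differ in minor details:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.commons.lang.StringUtils;

    class AudienceParseSketch {
      static List<String> parseExpectedAudiences(String expectedAudiences) {
        List<String> audList = null;
        // isEmpty rejects both null and "", so neither configures a list
        if (!StringUtils.isEmpty(expectedAudiences)) {
          audList = new ArrayList<String>();
          for (String aud : expectedAudiences.split(",")) {
            audList.add(aud);
          }
        }
        return audList; // null means: skip the audience check entirely
      }
    }

An empty audience property therefore behaves like an unset one, which is what
the new testEmptyAudienceConfigured test below verifies.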

http://git-wip-us.apache.org/repos/asf/knox/blob/22882317/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index 54c596b..ad18491 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -238,6 +238,99 @@ public abstract class AbstractJWTFilterTest  {
   }
 
   @Test
+  public void testNoTokenAudience() throws Exception {
+    try {
+      Properties props = getProperties();
+      props.put(getAudienceProperty(), "bar");
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", null,
+                             new Date(new Date().getTime() + 5000), new Date(), privateKey, "RS256");
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
+      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
+  @Test
+  public void testNoAudienceConfigured() throws Exception {
+    try {
+      Properties props = getProperties();
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", null,
+                             new Date(new Date().getTime() + 5000), new Date(), privateKey, "RS256");
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
+  @Test
+  public void testEmptyAudienceConfigured() throws Exception {
+    try {
+      Properties props = getProperties();
+      props.put(getAudienceProperty(), "");
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", null,
+                             new Date(new Date().getTime() + 5000), new Date(), privateKey, "RS256");
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+      Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+      Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+      Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
+  @Test
   public void testValidVerificationPEM() throws Exception {
     try {
       Properties props = getProperties();
@@ -605,8 +698,16 @@ public abstract class AbstractJWTFilterTest  {
   protected SignedJWT getJWT(String issuer, String sub, Date expires, Date nbf, RSAPrivateKey privateKey,
                              String signatureAlgorithm)
       throws Exception {
-    List<String> aud = new ArrayList<String>();
-    aud.add("bar");
+    return getJWT(issuer, sub, "bar", expires, nbf, privateKey, signatureAlgorithm);
+  }
+
+  protected SignedJWT getJWT(String issuer, String sub, String aud, Date expires, Date nbf, RSAPrivateKey privateKey,
+                             String signatureAlgorithm)
+      throws Exception {
+    List<String> audiences = new ArrayList<String>();
+    if (aud != null) {
+      audiences.add(aud);
+    }
 
     JWTClaimsSet claims = new JWTClaimsSet.Builder()
     .issuer(issuer)


[10/37] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String... services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+     * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        static final String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}
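
The long string literal above is template JSON for mocked Ambari REST responses. A minimal sketch of how a test might consume it, assuming a field named CLUSTERS_JSON_TEMPLATE holds the template (that name is hypothetical; only TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER comes from the source):

    // Substitute a concrete cluster name into the template before serving it
    // from a stubbed Ambari REST endpoint in the test.
    String body = CLUSTERS_JSON_TEMPLATE.replace(
        TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER, "Sandbox");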

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/descriptors/README b/b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/b/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/b/gateway-release/home/conf/shared-providers/README b/b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/pom.xml b/gateway-discovery-ambari/pom.xml
new file mode 100644
index 0000000..924e89c
--- /dev/null
+++ b/gateway-discovery-ambari/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.14.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-discovery-ambari</artifactId>
+
+    <name>gateway-discovery-ambari</name>
+    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-spi</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.easymock</groupId>
+            <artifactId>easymock</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
new file mode 100644
index 0000000..6eaabd3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+    private String name = null;
+
+    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+    private Map<String, AmbariComponent> components = null;
+
+
+    AmbariCluster(String name) {
+        this.name = name;
+        components = new HashMap<String, AmbariComponent>();
+    }
+
+    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+        if (!serviceConfigurations.containsKey(serviceName)) {
+            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+        }
+        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+    }
+
+
+    void addComponent(AmbariComponent component) {
+        components.put(component.getName(), component);
+    }
+
+
+    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+        ServiceConfiguration sc = null;
+        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+        if (configs != null) {
+            sc = configs.get(configurationType);
+        }
+        return sc;
+    }
+
+
+    Map<String, AmbariComponent> getComponents() {
+        return components;
+    }
+
+
+    AmbariComponent getComponent(String name) {
+        return components.get(name);
+    }
+
+
+    @Override
+    public String getName() {
+        return name;
+    }
+
+
+    @Override
+    public List<String> getServiceURLs(String serviceName) {
+        List<String> urls = new ArrayList<>();
+        urls.addAll(urlCreator.create(this, serviceName));
+        return urls;
+    }
+
+
+    static class ServiceConfiguration {
+
+        private String type;
+        private String version;
+        private Map<String, String> props;
+
+        ServiceConfiguration(String type, String version, Map<String, String> properties) {
+            this.type = type;
+            this.version = version;
+            this.props = properties;
+        }
+
+        public String getVersion() {
+            return version;
+        }
+
+        public String getType() {
+            return type;
+        }
+
+        public Map<String, String> getProperties() {
+            return props;
+        }
+    }
+
+}
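
A minimal sketch of how this model is populated and queried, assuming same-package access (the class and its methods are package-private), illustrative property values, and AmbariComponent as introduced in the next file:

    AmbariCluster cluster = new AmbariCluster("Sandbox");

    Map<String, String> props = new HashMap<>();
    props.put("oozie.base.url", "http://c6402.ambari.apache.org:11000/oozie");
    cluster.addServiceConfiguration("OOZIE", "oozie-site",
        new AmbariCluster.ServiceConfiguration("oozie-site", "3", props));
    cluster.addComponent(new AmbariComponent("OOZIE_SERVER", "3", "Sandbox", "OOZIE",
        Collections.singletonList("c6402.ambari.apache.org"), props));

    // getServiceURLs() delegates to AmbariServiceURLCreator, which reads the
    // OOZIE_SERVER component's oozie.base.url property.
    List<String> oozieURLs = cluster.getServiceURLs("OOZIE");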

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
new file mode 100644
index 0000000..55257fb
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import java.util.List;
+import java.util.Map;
+
+class AmbariComponent {
+
+    private String clusterName = null;
+    private String serviceName = null;
+    private String name        = null;
+    private String version     = null;
+
+    private List<String> hostNames = null;
+
+    private Map<String, String> properties = null;
+
+    AmbariComponent(String              name,
+                    String              version,
+                    String              cluster,
+                    String              service,
+                    List<String>        hostNames,
+                    Map<String, String> properties) {
+        this.name = name;
+        this.serviceName = service;
+        this.clusterName = cluster;
+        this.version = version;
+        this.hostNames = hostNames;
+        this.properties = properties;
+    }
+
+    public String getVersion() {
+        return version;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public String getServiceName() {
+        return serviceName;
+    }
+
+    public String getClusterName() {
+        return clusterName;
+    }
+
+    public List<String> getHostNames() {
+        return hostNames;
+    }
+
+    public Map<String, String> getConfigProperties() {
+        return properties;
+    }
+
+    public String getConfigProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
new file mode 100644
index 0000000..34f20a7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.config.ConfigurationException;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.security.AliasService;
+import org.apache.hadoop.gateway.services.security.AliasServiceException;
+import org.apache.hadoop.gateway.topology.discovery.GatewayService;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+import java.io.IOException;
+import java.util.*;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+    static final String TYPE = "AMBARI";
+
+    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+    static final String AMBARI_HOSTROLES_URI =
+                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+    static final String AMBARI_SERVICECONFIGS_URI =
+            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+    // Map of component names to service configuration types
+    private static Map<String, String> componentServiceConfigs = new HashMap<>();
+    static {
+        componentServiceConfigs.put("NAMENODE", "hdfs-site");
+        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
+        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
+        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
+        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
+        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
+    } // TODO: Are there other service components for which the endpoints can be discovered via Ambari?
+
+    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
+
+    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+
+    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+    @GatewayService
+    private AliasService aliasService;
+
+    private CloseableHttpClient httpClient = null;
+
+    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
+
+
+    AmbariServiceDiscovery() {
+        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+    }
+
+
+    @Override
+    public String getType() {
+        return TYPE;
+    }
+
+
+    @Override
+    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+        String discoveryAddress = config.getAddress();
+
+        // Invoke Ambari REST API to discover the available clusters
+        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+        // Parse the cluster names from the response, and perform the cluster discovery
+        JSONArray clusterItems = (JSONArray) json.get("items");
+        for (Object clusterItem : clusterItems) {
+            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+            try {
+                Cluster c = discover(config, clusterName);
+                clusters.put(clusterName, c);
+            } catch (Exception e) {
+                log.clusterDiscoveryError(clusterName, e);
+            }
+        }
+
+        return clusters;
+    }
+
+
+    @Override
+    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+        AmbariCluster cluster = new AmbariCluster(clusterName);
+
+        Map<String, String> serviceComponents = new HashMap<>();
+
+        String discoveryAddress = config.getAddress();
+        String discoveryUser = config.getUser();
+        String discoveryPwdAlias = config.getPasswordAlias();
+
+        Map<String, List<String>> componentHostNames = new HashMap<>();
+        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+        if (hostRolesJSON != null) {
+            // Process the host roles JSON
+            JSONArray items = (JSONArray) hostRolesJSON.get("items");
+            for (Object obj : items) {
+                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+                for (Object component : components) {
+                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+                    for (Object hostComponent : hostComponents) {
+                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+                        String serviceName = (String) hostRoles.get("service_name");
+                        String componentName = (String) hostRoles.get("component_name");
+
+                        serviceComponents.put(componentName, serviceName);
+
+//                    String hostName = (String) hostRoles.get("host_name");
+                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
+                        log.discoveredServiceHost(serviceName, hostName);
+                        if (!componentHostNames.containsKey(componentName)) {
+                            componentHostNames.put(componentName, new ArrayList<String>());
+                        }
+                        componentHostNames.get(componentName).add(hostName);
+                    }
+                }
+            }
+        }
+
+        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+        if (serviceConfigsJSON != null) {
+            // Process the service configurations
+            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+            for (Object serviceConfig : serviceConfigs) {
+                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+                for (Object configuration : configurations) {
+                    String configType = (String) ((JSONObject) configuration).get("type");
+                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+                    Map<String, String> configProps = new HashMap<String, String>();
+                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+                    for (String propertyName : configProperties.keySet()) {
+                        configProps.put(propertyName, String.valueOf(configProperties.get(propertyName)));
+                    }
+                    if (!serviceConfigurations.containsKey(serviceName)) {
+                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+                    }
+                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+                }
+            }
+        }
+
+        // Construct the AmbariCluster model
+        for (String componentName : serviceComponents.keySet()) {
+            String serviceName = serviceComponents.get(componentName);
+            List<String> hostNames = componentHostNames.get(componentName);
+
+            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+            String configType = componentServiceConfigs.get(componentName);
+            if (configType != null) {
+                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+                AmbariComponent c = new AmbariComponent(componentName,
+                                                        svcConfig.getVersion(),
+                                                        clusterName,
+                                                        serviceName,
+                                                        hostNames,
+                                                        svcConfig.getProperties());
+                cluster.addComponent(c);
+            }
+        }
+
+        return cluster;
+    }
+
+
+    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+        JSONObject result = null;
+
+        CloseableHttpResponse response = null;
+        try {
+            HttpGet request = new HttpGet(url);
+
+            // If no username is configured, fall back to the default username alias
+            String password = null;
+            if (username == null) {
+                if (aliasService != null) {
+                    try {
+                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+                        if (defaultUser != null) {
+                            username = new String(defaultUser);
+                        }
+                    } catch (AliasServiceException e) {
+                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+                    }
+                }
+
+                // If username is still null
+                if (username == null) {
+                    log.aliasServiceUserNotFound();
+                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
+                }
+            }
+
+            if (aliasService != null) {
+                // If no password alias is configured, then try the default alias
+                if (passwordAlias == null) {
+                    passwordAlias = DEFAULT_PWD_ALIAS;
+                }
+                try {
+                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+                    if (pwd != null) {
+                        password = new String(pwd);
+                    }
+
+                } catch (AliasServiceException e) {
+                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+                }
+            }
+
+            // If the password could not be determined
+            if (password == null) {
+                log.aliasServicePasswordNotFound();
+                throw new ConfigurationException("No password is configured for Ambari service discovery.");
+            }
+
+            // Add an auth header if credentials are available
+            String encodedCreds =
+                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+            response = httpClient.execute(request);
+
+            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+                HttpEntity entity = response.getEntity();
+                if (entity != null) {
+                    result = (JSONObject) JSONValue.parse(EntityUtils.toString(entity));
+                    log.debugJSON(result.toJSONString());
+                } else {
+                    log.noJSON(url);
+                }
+            } else {
+                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+            }
+
+        } catch (IOException e) {
+            log.restInvocationError(url, e);
+        } finally {
+            if(response != null) {
+                try {
+                    response.close();
+                } catch (IOException e) {
+                    // Ignore
+                }
+            }
+        }
+        return result;
+    }
+
+
+}
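
A rough sketch of driving discovery for a single cluster, assuming ServiceDiscoveryConfig declares only the three accessors this class calls (getAddress, getUser, getPasswordAlias), and using the AmbariServiceDiscoveryType factory introduced below; credential resolution through the AliasService is elided:

    ServiceDiscoveryConfig config = new ServiceDiscoveryConfig() {
        @Override public String getAddress()       { return "http://c6401.ambari.apache.org:8080"; }
        @Override public String getUser()          { return "admin"; }
        @Override public String getPasswordAlias() { return "ambari.discovery.password"; }
    };

    ServiceDiscovery discovery = new AmbariServiceDiscoveryType().newInstance();
    ServiceDiscovery.Cluster cluster = discovery.discover(config, "Sandbox");
    List<String> webhdfsURLs = cluster.getServiceURLs("WEBHDFS");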

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
new file mode 100644
index 0000000..caa16ed
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.Message;
+import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
+import org.apache.hadoop.gateway.i18n.messages.Messages;
+import org.apache.hadoop.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error during cluster {0} discovery: {1}")
+    void clusterDiscoveryError(final String clusterName,
+                               @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} failed: {1}")
+    void restInvocationError(final String url,
+                             @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+    void aliasServiceUserError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+    void aliasServicePasswordError(final String alias, final String error);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No user configured for Ambari service discovery.")
+    void aliasServiceUserNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "No password configured for Ambari service discovery.")
+    void aliasServicePasswordNotFound();
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "Unexpected REST invocation response code for {0} : {1}")
+    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+    @Message(level = MessageLevel.ERROR,
+            text = "REST invocation {0} yielded a response without any JSON.")
+    void noJSON(final String url);
+
+
+    @Message(level = MessageLevel.DEBUG,
+            text = "REST invocation result: {0}")
+    void debugJSON(final String json);
+
+
+    @Message(level = MessageLevel.INFO,
+            text = "Discovered: Service: {0}, Host: {1}")
+    void discoveredServiceHost(final String serviceName, final String hostName);
+
+
+
+
+}
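
For reference, the gateway's i18n facility produces an implementation of this interface at runtime, as used in AmbariServiceDiscovery above:

    AmbariServiceDiscoveryMessages log =
        MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
    log.discoveredServiceHost("HIVE", "c6402.ambari.apache.org");
    // logs at INFO: "Discovered: Service: HIVE, Host: c6402.ambari.apache.org"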

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
new file mode 100644
index 0000000..723a786
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
+
+public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
+
+    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
+
+    @Override
+    public String getType() {
+        return AmbariServiceDiscovery.TYPE;
+    }
+
+    @Override
+    public ServiceDiscovery newInstance() {
+        return new AmbariServiceDiscovery();
+    }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
new file mode 100644
index 0000000..0674642
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
@@ -0,0 +1,184 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+import java.util.ArrayList;
+import java.util.List;
+
+class AmbariServiceURLCreator {
+
+    private static final String NAMENODE_SERVICE        = "NAMENODE";
+    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
+    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
+    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
+    private static final String OOZIE_SERVICE           = "OOZIE";
+    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
+    private static final String HIVE_SERVICE            = "HIVE";
+    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
+
+
+    /**
+     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
+     *
+     * @param cluster The cluster discovery results
+     * @param serviceName The name of a Hadoop service
+     *
+     * @return One or more endpoint URLs for the specified service.
+     */
+    public List<String> create(AmbariCluster cluster, String serviceName) {
+        List<String> result = null;
+
+        if (NAMENODE_SERVICE.equals(serviceName)) {
+            result = createNameNodeURL(cluster);
+        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
+            result = createJobTrackerURL(cluster);
+        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
+            result = createWebHDFSURL(cluster);
+        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
+            result = createWebHCatURL(cluster);
+        } else if (OOZIE_SERVICE.equals(serviceName)) {
+            result = createOozieURL(cluster);
+        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
+            result = createWebHBaseURL(cluster);
+        } else if (HIVE_SERVICE.equals(serviceName)) {
+            result = createHiveURL(cluster);
+        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
+            result = createResourceManagerURL(cluster);
+        }
+
+        return result;
+    }
+
+
+    private List<String> createNameNodeURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("NAMENODE");
+        if (comp != null) {
+            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createJobTrackerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
+        if (comp != null) {
+            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHDFSURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
+        if (sc != null) {
+            String address = sc.getProperties().get("dfs.namenode.http-address");
+            result.add("http://" + address + "/webhdfs");
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHCatURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
+        if (webhcat != null) {
+            String port = webhcat.getConfigProperty("templeton.port");
+            String host = webhcat.getHostNames().get(0);
+
+            result.add("http://" + host + ":" + port + "/templeton");
+        }
+        return result;
+    }
+
+
+    private List<String> createOozieURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
+        if (comp != null) {
+            result.add(comp.getConfigProperty("oozie.base.url"));
+        }
+
+        return result;
+    }
+
+
+    private List<String> createWebHBaseURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
+        if (comp != null) {
+            for (String host : comp.getHostNames()) {
+                result.add("http://" + host + ":60080");
+            }
+        }
+
+        return result;
+    }
+
+
+    private List<String> createHiveURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
+        if (hive != null) {
+            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
+            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
+            String transport = hive.getConfigProperty("hive.server2.transport.mode");
+            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
+            String host = hive.getHostNames().get(0);
+
+            String scheme = null; // The scheme for the binary transport mode is not yet handled
+            if ("http".equals(transport)) {
+                scheme = Boolean.parseBoolean(useSSL) ? "https" : "http";
+            }
+            // Avoid emitting a malformed "null://..." URL for unsupported transport modes
+            if (scheme != null) result.add(scheme + "://" + host + ":" + port + "/" + path);
+        }
+        return result;
+    }
+
+
+    private List<String> createResourceManagerURL(AmbariCluster cluster) {
+        List<String> result = new ArrayList<>();
+
+        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
+        if (resMan != null) {
+            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
+            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
+            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
+
+            result.add(scheme + "://" + webappAddress + "/ws");
+        }
+
+        return result;
+    }
+
+
+}
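
A small usage sketch, assuming a cluster whose HIVE_SERVER component carries the hive-site properties shown in the test payload earlier (http transport, SSL disabled):

    AmbariServiceURLCreator creator = new AmbariServiceURLCreator();
    List<String> hiveURLs = creator.create(cluster, "HIVE");
    // e.g. ["http://c6402.ambari.apache.org:10001/cliservice"]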

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
new file mode 100644
index 0000000..1da4fc9
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file


[22/37] knox git commit: KNOX-1075 and KNOX-1070 - Enforce minimum maven and Java version (Rick Kellogg via Sandeep More)

Posted by lm...@apache.org.
KNOX-1075 and KNOX-1070 - Enforce minimum maven and Java version (Rick Kellogg via Sandeep More)


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/485769b9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/485769b9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/485769b9

Branch: refs/heads/KNOX-1049
Commit: 485769b95b44f47699c48c47945a75c2b1d2ff97
Parents: ff3af36
Author: Sandeep More <mo...@apache.org>
Authored: Wed Oct 4 15:05:50 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Oct 4 15:05:50 2017 -0400

----------------------------------------------------------------------
 pom.xml | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/485769b9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2708f6b..fb00d71 100644
--- a/pom.xml
+++ b/pom.xml
@@ -336,6 +336,30 @@
                     </execution>
                 </executions>
             </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <version>3.0.0-M1</version>
+                <executions>
+                  <execution>
+                    <id>enforce-maven</id>
+                    <goals>
+                      <goal>enforce</goal>
+                    </goals>
+                    <configuration>
+                      <rules>
+                        <requireMavenVersion>
+                          <version>[3.0.2,)</version>
+                        </requireMavenVersion>
+                        <!--<requireJavaVersion>
+                          <version>[1.8,)</version>
+                        </requireJavaVersion> -->
+                      </rules>
+                    </configuration>
+                  </execution>
+                </executions>
+            </plugin>
+
         </plugins>
     </build>
 


[29/37] knox git commit: KNOX-1045 - Knox Token Service has a trailing whitespace for the token_type attribute

Posted by lm...@apache.org.
KNOX-1045 - Knox Token Service has a trailing whitespace for the token_type attribute


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/6acfa43f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/6acfa43f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/6acfa43f

Branch: refs/heads/KNOX-1049
Commit: 6acfa43f419b7dfc579988efce32091f8349e0e7
Parents: 92b1505
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Sep 29 14:34:53 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Oct 16 15:27:31 2017 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/6acfa43f/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index 8dddf02..df8288a 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -50,7 +50,7 @@ public class TokenResource {
   private static final String TOKEN_TYPE = "token_type";
   private static final String ACCESS_TOKEN = "access_token";
   private static final String TARGET_URL = "target_url";
-  private static final String BEARER = "Bearer ";
+  private static final String BEARER = "Bearer";
   private static final String TOKEN_TTL_PARAM = "knox.token.ttl";
   private static final String TOKEN_AUDIENCES_PARAM = "knox.token.audiences";
   private static final String TOKEN_TARGET_URL = "knox.token.target.url";
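
For context, a hedged sketch of why the trailing space mattered: the constant presumably becomes the token_type value in the token response, which clients combine with the access token themselves (tokenType and accessToken are hypothetical names; the response-building code is not shown in this diff):

    String authorizationHeader = tokenType + " " + accessToken;
    // before the fix: token_type "Bearer " yields "Bearer  <token>" (double space)
    // after the fix:  token_type "Bearer"  yields "Bearer <token>"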


[28/37] knox git commit: KNOX-1081 - Remove Picketlink Provider Module

Posted by lm...@apache.org.
KNOX-1081 - Remove Picketlink Provider Module

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/92b1505a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/92b1505a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/92b1505a

Branch: refs/heads/KNOX-1049
Commit: 92b1505a70057aef762ac20bf80a7249d947e3e9
Parents: 0719da3
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 12 17:28:40 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 12 17:28:40 2017 -0400

----------------------------------------------------------------------
 gateway-provider-security-picketlink/pom.xml    |  76 --------
 .../gateway/picketlink/PicketlinkMessages.java  |  40 ----
 .../picketlink/deploy/PicketlinkConf.java       | 194 -------------------
 ...PicketlinkFederationProviderContributor.java | 132 -------------
 .../filter/CaptureOriginalURLFilter.java        |  89 ---------
 .../filter/PicketlinkIdentityAdapter.java       | 102 ----------
 ...gateway.deploy.ProviderDeploymentContributor |  19 --
 .../gateway/picketlink/PicketlinkTest.java      |  30 ---
 gateway-release/pom.xml                         |   4 -
 pom.xml                                         |  16 --
 10 files changed, 702 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/pom.xml b/gateway-provider-security-picketlink/pom.xml
deleted file mode 100644
index 0e6f1a5..0000000
--- a/gateway-provider-security-picketlink/pom.xml
+++ /dev/null
@@ -1,76 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.knox</groupId>
-        <artifactId>gateway</artifactId>
-        <version>0.14.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>gateway-provider-security-picketlink</artifactId>
-
-    <name>gateway-provider-security-picketlink</name>
-    <description>An extension of the gateway introducing picketlink for SAML integration.</description>
-
-    <licenses>
-        <license>
-            <name>The Apache Software License, Version 2.0</name>
-            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-            <distribution>repo</distribution>
-        </license>
-    </licenses>
-
-    <dependencies>
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-spi</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-util-common</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.picketlink</groupId>
-            <artifactId>picketlink-federation</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.jboss.logging</groupId>
-            <artifactId>jboss-logging</artifactId>
-        </dependency>
-        
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.easymock</groupId>
-            <artifactId>easymock</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.knox</groupId>
-            <artifactId>gateway-test-utils</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-    </dependencies>
-
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
deleted file mode 100644
index c49030f..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkMessages {
-
-  @Message( level = MessageLevel.DEBUG, text = "Found Original URL in reequest: {0}")
-  public void foundOriginalURLInRequest(String url);
-
-  @Message( level = MessageLevel.DEBUG, text = "setting cookie for original-url")
-  public void settingCookieForOriginalURL();
-
-  @Message( level = MessageLevel.DEBUG, text = "Secure Flag is set to False for cookie")
-  public void secureFlagFalseForCookie();
-
-  @Message( level = MessageLevel.ERROR, text = "Unable to get the gateway identity passphrase: {0}")
-  public void unableToGetGatewayIdentityPassphrase(@StackTrace( level = MessageLevel.DEBUG) Exception e);
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
deleted file mode 100644
index 59203c6..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.deploy;
-
-/**
- * Builds the picketlink.xml configuration as an XML string
- * for inclusion in the web app archive.
- *
- */
-public class PicketlinkConf {
-  public static final String INDENT = "    ";
-  public static final String LT_OPEN = "<";
-  public static final String LT_CLOSE = "</";
-  public static final String GT = ">";
-  public static final String GT_CLOSE = "/>";
-  public static final String NL = "\n";
-  public static final String PICKETLINK_XMLNS = "urn:picketlink:identity-federation:config:2.1";
-  public static final String PICKETLINK_SP_XMLNS = "urn:picketlink:identity-federation:config:1.0";
-  public static final String C14N_METHOD = "http://www.w3.org/2001/10/xml-exc-c14n#";
-  public static final String KEYPROVIDER_ELEMENT = "KeyProvider";
-  public static final String KEYPROVIDER_CLASSNAME = "org.picketlink.identity.federation.core.impl.KeyStoreKeyManager";
-  public static final String AUTH_HANDLER_CLASSNAME = "org.picketlink.identity.federation.web.handlers.saml2.SAML2AuthenticationHandler";
-  public static final String ROLE_GEN_HANDLER_CLASSNAME = "org.picketlink.identity.federation.web.handlers.saml2.RolesGenerationHandler";
-  public static final String PICKETLINK_ELEMENT = "PicketLink";
-  public static final String PICKETLINKSP_ELEMENT = "PicketLinkSP";
-  public static final String HANDLERS_ELEMENT = "Handlers";
-  public static final String HANDLER_ELEMENT = "Handler";
-  public static final String OPTION_ELEMENT = "Option";
-  public static final String VAL_ALIAS_ELEMENT = "ValidatingAlias";
-  public static final String AUTH_ELEMENT = "Auth";
-
-  private String serverEnvironment = "jetty";
-  private String bindingType = "POST";
-  private String idpUsesPostingBinding = "true";
-  private String supportsSignatures = "true";
-  private String identityURL = null;
-  private String serviceURL = null;
-  private String keystoreURL = null;
-  private String keystorePass = null;
-  private String signingKeyAlias = null;
-  private String signingKeyPass = null;
-  private String validatingKeyAlias = null;
-  private String validatingKeyValue = null;
-  private String nameIDFormat = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent";
-  private String clockSkewMilis = null;
-  private String assertionSessionAttributeName = "org.picketlink.sp.assertion";
-  
-  public String getServerEnvironment() {
-    return serverEnvironment;
-  }
-  public void setServerEnvironment(String serverEnvironment) {
-    this.serverEnvironment = serverEnvironment;
-  }
-  public String getBindingType() {
-    return bindingType;
-  }
-  public void setBindingType(String bindingType) {
-    this.bindingType = bindingType;
-  }
-  public String getIdpUsesPostingBinding() {
-    return idpUsesPostingBinding;
-  }
-  public void setIdpUsesPostingBinding(String idpUsesPostingBinding) {
-    this.idpUsesPostingBinding = idpUsesPostingBinding;
-  }
-  public String getSupportsSignatures() {
-    return supportsSignatures;
-  }
-  public void setSupportsSignatures(String supportsSignatures) {
-    this.supportsSignatures = supportsSignatures;
-  }
-  public String getIdentityURL() {
-    return identityURL;
-  }
-  public void setIdentityURL(String identityURL) {
-    this.identityURL = identityURL;
-  }
-  public String getServiceURL() {
-    return serviceURL;
-  }
-  public void setServiceURL(String serviceURL) {
-    this.serviceURL = serviceURL;
-  }
-  public String getKeystoreURL() {
-    return keystoreURL;
-  }
-  public void setKeystoreURL(String keystoreURL) {
-    this.keystoreURL = keystoreURL;
-  }
-  public String getKeystorePass() {
-    return keystorePass;
-  }
-  public void setKeystorePass(String keystorePass) {
-    this.keystorePass = keystorePass;
-  }
-  public String getSigningKeyAlias() {
-    return signingKeyAlias;
-  }
-  public void setSigningKeyAlias(String signingKeyAlias) {
-    this.signingKeyAlias = signingKeyAlias;
-  }
-  public String getSigningKeyPass() {
-    return signingKeyPass;
-  }
-  public void setSigningKeyPass(String signingKeyPass) {
-    this.signingKeyPass = signingKeyPass;
-  }
-  public String getValidatingKeyAlias() {
-    return validatingKeyAlias;
-  }
-  public void setValidatingAliasKey(String validatingKeyAlias) {
-    this.validatingKeyAlias = validatingKeyAlias;
-  }
-  public String getValidatingKeyValue() {
-    return validatingKeyValue;
-  }
-  public void setValidatingAliasValue(String validatingKeyValue) {
-    this.validatingKeyValue = validatingKeyValue;
-  }
-  public String getNameIDFormat() {
-    return nameIDFormat;
-  }
-  public void setNameIDFormat(String nameIDFormat) {
-    this.nameIDFormat = nameIDFormat;
-  }
-  public String getClockSkewMilis() {
-    return clockSkewMilis;
-  }
-  public void setClockSkewMilis(String clockSkewMilis) {
-    this.clockSkewMilis = clockSkewMilis;
-  }
-  public String getAssertionSessionAttributeName() {
-    return assertionSessionAttributeName;
-  }
-  public void setAssertionSessionAttributeName(
-      String assertionSessionAttributeName) {
-    this.assertionSessionAttributeName = assertionSessionAttributeName;
-  }
-  @Override
-  public String toString() {
-    // THIS IS HORRID: REPLACE WITH DOM+TRANSFORM
-    StringBuffer xml = new StringBuffer();
-    xml.append("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>").append(NL)
-    .append(LT_OPEN).append(PICKETLINK_ELEMENT).append(" xmlns=\"").append(PICKETLINK_XMLNS).append("\"" + GT).append(NL)
-      .append(INDENT).append(LT_OPEN).append(PICKETLINKSP_ELEMENT).append(" xmlns=\"").append(PICKETLINK_SP_XMLNS + "\"").append(NL)
-      .append(INDENT).append(INDENT).append("ServerEnvironment").append("=\"").append(serverEnvironment).append("\"").append(NL)
-      .append(INDENT).append(INDENT).append("BindingType").append("=\"").append(bindingType).append("\"").append(NL)
-      .append(INDENT).append(INDENT).append("IDPUsesPostBinding").append("=\"").append(idpUsesPostingBinding).append("\"").append(NL)
-      .append(INDENT).append(INDENT).append("SupportsSignatures").append("=\"").append(supportsSignatures).append("\"").append(NL)
-      .append(INDENT).append(INDENT).append("CanonicalizationMethod").append("=\"").append(C14N_METHOD).append("\"").append(GT).append(NL).append(NL)
-      .append(INDENT).append(INDENT).append(LT_OPEN).append("IdentityURL").append(GT).append(identityURL).append(LT_CLOSE).append("IdentityURL").append(GT).append(NL)
-      .append(INDENT).append(INDENT).append(LT_OPEN).append("ServiceURL").append(GT).append(serviceURL).append(LT_CLOSE).append("ServiceURL").append(GT).append(NL)
-      .append(INDENT).append(INDENT).append(LT_OPEN).append(KEYPROVIDER_ELEMENT).append(" ").append("ClassName=\"").append(KEYPROVIDER_CLASSNAME + "\"" + GT).append(NL)
-        .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"KeyStoreURL\" Value=\"").append(keystoreURL).append("\"").append(GT_CLOSE).append(NL)
-        .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"KeyStorePass\" Value=\"").append(keystorePass).append("\"").append(GT_CLOSE).append(NL)
-        .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"SigningKeyAlias\" Value=\"").append(signingKeyAlias).append("\"").append(GT_CLOSE).append(NL)
-        .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"SigningKeyPass\" Value=\"").append(signingKeyPass).append("\"").append(GT_CLOSE).append(NL)
-        .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(VAL_ALIAS_ELEMENT).append(" Key=\"").append(validatingKeyAlias).append("\" Value=\"").append(validatingKeyValue).append("\"").append(GT_CLOSE).append(NL)
-      .append(INDENT).append(INDENT).append(LT_CLOSE).append(KEYPROVIDER_ELEMENT).append(GT).append(NL)
-      .append(INDENT).append(LT_CLOSE).append(PICKETLINKSP_ELEMENT).append(GT).append(NL)
-      .append(INDENT).append(LT_OPEN).append(HANDLERS_ELEMENT).append(GT).append(NL)
-        .append(INDENT).append(INDENT).append(LT_OPEN).append(HANDLER_ELEMENT).append(" class=\"").append(AUTH_HANDLER_CLASSNAME).append("\">").append(NL)
-          .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"NAMEID_FORMAT\" Value=\"").append(nameIDFormat).append("\"").append(GT_CLOSE).append(NL)
-          .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"CLOCK_SKEW_MILIS\" Value=\"").append(clockSkewMilis).append("\"").append(GT_CLOSE).append(NL)
-          .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"ASSERTION_SESSION_ATTRIBUTE_NAME\" Value=\"").append(assertionSessionAttributeName).append("\"").append(GT_CLOSE).append(NL)
-        .append(INDENT).append(INDENT).append(LT_CLOSE).append(HANDLER_ELEMENT).append(GT).append(NL)
-        .append(INDENT).append(INDENT).append(LT_OPEN).append(HANDLER_ELEMENT).append(" class=\"").append(ROLE_GEN_HANDLER_CLASSNAME).append("\"/>").append(NL)
-      .append(INDENT).append(LT_CLOSE).append(HANDLERS_ELEMENT).append(GT).append(NL)
-    .append(LT_CLOSE).append(PICKETLINK_ELEMENT).append(GT).append(NL);
-     
-    return xml.toString();
-  }
-  
-  public static void main(String[] args) {
-    PicketlinkConf conf = new PicketlinkConf();
-    System.out.println(conf.toString());
-  }
-
-}

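A minimal sketch, assuming only the JDK's javax.xml APIs, of the DOM+Transformer
approach the removed class's own comment calls for; the class name and the
element subset shown are illustrative, not part of any commit:

    import java.io.StringWriter;

    import javax.xml.parsers.DocumentBuilderFactory;
    import javax.xml.transform.OutputKeys;
    import javax.xml.transform.Transformer;
    import javax.xml.transform.TransformerFactory;
    import javax.xml.transform.dom.DOMSource;
    import javax.xml.transform.stream.StreamResult;

    import org.w3c.dom.Document;
    import org.w3c.dom.Element;

    public class PicketlinkConfDomSketch {
      // Builds a skeletal picketlink.xml; only a few of the real elements and
      // attributes appear here, the rest would follow the same pattern.
      public String toXml(String identityUrl, String serviceUrl) throws Exception {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();

        Element root = doc.createElementNS("urn:picketlink:identity-federation:config:2.1", "PicketLink");
        doc.appendChild(root);

        Element sp = doc.createElementNS("urn:picketlink:identity-federation:config:1.0", "PicketLinkSP");
        sp.setAttribute("BindingType", "POST");
        root.appendChild(sp);

        Element identity = doc.createElement("IdentityURL");
        identity.setTextContent(identityUrl);
        sp.appendChild(identity);

        Element service = doc.createElement("ServiceURL");
        service.setTextContent(serviceUrl);
        sp.appendChild(service);

        // Serializing the DOM gives correct escaping and well-formedness for
        // free, which the StringBuffer version had to get right by hand.
        Transformer t = TransformerFactory.newInstance().newTransformer();
        t.setOutputProperty(OutputKeys.INDENT, "yes");
        StringWriter out = new StringWriter();
        t.transform(new DOMSource(doc), new StreamResult(out));
        return out.toString();
      }
    }
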
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
deleted file mode 100644
index 4f90a41..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.deploy;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.picketlink.PicketlinkMessages;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.hadoop.gateway.services.security.MasterService;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-import org.jboss.shrinkwrap.api.asset.StringAsset;
-import org.picketlink.identity.federation.web.filters.ServiceProviderContextInitializer;
-
-public class PicketlinkFederationProviderContributor extends
-    ProviderDeploymentContributorBase {
-  private static final String ROLE = "federation";
-  private static final String NAME = "Picketlink";
-  private static final String PICKETLINK_FILTER_CLASSNAME = "org.picketlink.identity.federation.web.filters.SPFilter";
-  private static final String CAPTURE_URL_FILTER_CLASSNAME = "org.apache.hadoop.gateway.picketlink.filter.CaptureOriginalURLFilter";
-  private static final String IDENTITY_ADAPTER_CLASSNAME = "org.apache.hadoop.gateway.picketlink.filter.PicketlinkIdentityAdapter";
-  private static final String IDENTITY_URL_PARAM = "identity.url";
-  private static final String SERVICE_URL_PARAM = "service.url";
-  private static final String KEYSTORE_URL_PARAM = "keystore.url";
-  private static final String SIGNINGKEY_ALIAS = "gateway-identity";
-  private static final String VALIDATING_ALIAS_KEY = "validating.alias.key";
-  private static final String VALIDATING_ALIAS_VALUE = "validating.alias.value";
-  private static final String CLOCK_SKEW_MILIS = "clock.skew.milis";
-  private static PicketlinkMessages log = MessagesFactory.get( PicketlinkMessages.class );
-
-  private MasterService ms = null;
-  private AliasService as = null;
-
-  @Override
-  public String getRole() {
-    return ROLE;
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-  
-  public void setMasterService(MasterService ms) {
-    this.ms = ms;
-  }
-
-  public void setAliasService(AliasService as) {
-    this.as = as;
-  }
-
-  @Override
-  public void initializeContribution(DeploymentContext context) {
-    super.initializeContribution(context);
-  }
-
-  @Override
-  public void contributeProvider(DeploymentContext context, Provider provider) {
-    // LJM TODO: consider creating a picketlink configuration provider to
-    // handle the keystore secrets without putting them in a config file directly.
-    // Once that is done, we can remove the unneeded gateway services from those
-    // that are available to providers.
-    context.getWebAppDescriptor().createListener().listenerClass( ServiceProviderContextInitializer.class.getName());
-
-    PicketlinkConf config = new PicketlinkConf( );
-    Map<String,String> params = provider.getParams();
-    config.setIdentityURL(params.get(IDENTITY_URL_PARAM));
-    config.setServiceURL(params.get(SERVICE_URL_PARAM));
-    config.setKeystoreURL(params.get(KEYSTORE_URL_PARAM));
-    if (ms != null) {
-      config.setKeystorePass(new String(ms.getMasterSecret()));
-    }
-    config.setSigningKeyAlias(SIGNINGKEY_ALIAS);
-    if (as != null) {
-      char[] passphrase = null;
-      try {
-        passphrase = as.getGatewayIdentityPassphrase();
-        config.setSigningKeyPass(new String(passphrase));
-      } catch (AliasServiceException e) {
-        log.unableToGetGatewayIdentityPassphrase(e);
-      }
-    }
-    config.setValidatingAliasKey(params.get(VALIDATING_ALIAS_KEY));
-    config.setValidatingAliasValue(params.get(VALIDATING_ALIAS_VALUE));
-    config.setClockSkewMilis(params.get(CLOCK_SKEW_MILIS));
-    String configStr = config.toString();
-    if( configStr != null ) {
-      context.getWebArchive().addAsWebInfResource( new StringAsset( configStr ), "picketlink.xml" );
-    }
-  }
-
-  @Override
-  public void contributeFilter(DeploymentContext context, Provider provider, Service service, 
-      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
-    // blindly add all the provider params as filter init params
-    if (params == null) {
-      params = new ArrayList<FilterParamDescriptor>();
-    }
-    Map<String, String> providerParams = provider.getParams();
-    for(Entry<String, String> entry : providerParams.entrySet()) {
-      params.add( resource.createFilterParam().name( entry.getKey().toLowerCase() ).value( entry.getValue() ) );
-    }
-    resource.addFilter().name( getName() ).role( getRole() ).impl( CAPTURE_URL_FILTER_CLASSNAME ).params( params );
-    resource.addFilter().name( getName() ).role( getRole() ).impl( PICKETLINK_FILTER_CLASSNAME ).params( params );
-    resource.addFilter().name( getName() ).role( getRole() ).impl( IDENTITY_ADAPTER_CLASSNAME ).params( params );
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
deleted file mode 100644
index 66da6c4..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.filter;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.picketlink.PicketlinkMessages;
-
-import java.io.IOException;
-
-public class CaptureOriginalURLFilter implements Filter {
-  private static PicketlinkMessages log = MessagesFactory.get( PicketlinkMessages.class );
-  private static final String COOKIE_PATH = "cookie.path";
-  private static final String COOKIE_SECURE = "cookie.secure";
-  private String cookiePath = null;
-  private String cookieSecure = null;
-
-  @Override
-  public void init( FilterConfig filterConfig ) throws ServletException {
-    cookiePath = filterConfig.getInitParameter(COOKIE_PATH);
-    if (cookiePath == null) {
-      cookiePath = "/gateway/idp/knoxsso/api/v1/websso";
-    }
-    cookieSecure = filterConfig.getInitParameter(COOKIE_SECURE);
-    if (cookieSecure == null) {
-      cookieSecure = "true";
-    }
-  }
-
-  @Override
-  public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
-    String original = null;
-    HttpServletRequest request = (HttpServletRequest)servletRequest;
-    String url = request.getParameter("originalUrl");
-    if (url != null) {
-      log.foundOriginalURLInRequest(url);
-      original = url;
-      log.settingCookieForOriginalURL();
-      addCookie(servletResponse, original);
-    }
-    filterChain.doFilter(request, servletResponse);
-  }
-
-  @Override
-  public void destroy() {
-
-  }
-
-  private void addCookie(ServletResponse servletResponse, String original) {
-    Cookie c = new Cookie("original-url", original);
-    c.setPath(cookiePath);
-    c.setHttpOnly(true);
-    boolean secureOnly = true;
-    if (cookieSecure != null) {
-      secureOnly = ("false".equals(cookieSecure) ? false : true);
-      if (!secureOnly) {
-        log.secureFlagFalseForCookie();
-      }
-    }
-    c.setSecure(secureOnly);
-    c.setMaxAge(60);
-    ((HttpServletResponse)servletResponse).addCookie(c);
-  }
-
-}
\ No newline at end of file

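The removed PicketlinkTest further below only asserts true, so for reference
here is a hedged sketch of how the cookie contract of the removed
CaptureOriginalURLFilter could be exercised with EasyMock, in the style of the
Knox test suites (assumes EasyMock 3.2+ for newCapture; values illustrative):

    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.http.Cookie;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.easymock.Capture;
    import org.easymock.EasyMock;
    import org.junit.Assert;
    import org.junit.Test;

    public class CaptureOriginalURLFilterSketchTest {
      @Test
      public void testOriginalUrlParamBecomesCookie() throws Exception {
        FilterConfig config = EasyMock.createNiceMock(FilterConfig.class); // null params -> defaults
        HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
        EasyMock.expect(request.getParameter("originalUrl"))
                .andReturn("https://host:8443/app").anyTimes();
        HttpServletResponse response = EasyMock.createMock(HttpServletResponse.class);
        Capture<Cookie> cookie = EasyMock.newCapture();
        response.addCookie(EasyMock.capture(cookie));
        FilterChain chain = EasyMock.createNiceMock(FilterChain.class);
        EasyMock.replay(config, request, response, chain);

        CaptureOriginalURLFilter filter = new CaptureOriginalURLFilter();
        filter.init(config);
        filter.doFilter(request, response, chain);

        Assert.assertEquals("original-url", cookie.getValue().getName());
        Assert.assertEquals("https://host:8443/app", cookie.getValue().getValue());
        Assert.assertTrue(cookie.getValue().getSecure()); // secure by default
      }
    }
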
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
deleted file mode 100644
index 333f91d..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.filter;
-
-import java.io.IOException;
-import java.security.Principal;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import javax.security.auth.Subject;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditService;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
-import org.apache.hadoop.gateway.security.PrimaryPrincipal;
-
-public class PicketlinkIdentityAdapter implements Filter {
-  
-  private static AuditService auditService = AuditServiceFactory.getAuditService();
-  private static Auditor auditor = auditService.getAuditor(
-      AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
-      AuditConstants.KNOX_COMPONENT_NAME );
-  
-
-  @Override
-  public void init( FilterConfig filterConfig ) throws ServletException {
-  }
-
-  public void destroy() {
-  }
-
-  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) 
-      throws IOException, ServletException {
-    
-    HttpServletRequest httpRequest = (HttpServletRequest) request;
-    String username = httpRequest.getUserPrincipal().getName();
-    PrimaryPrincipal pp = new PrimaryPrincipal(username);
-    Subject subject = new Subject();
-    subject.getPrincipals().add(pp);
-    
-    Principal principal = subject.getPrincipals(PrimaryPrincipal.class).iterator().next();
-    auditService.getContext().setUsername( principal.getName() );
-    String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
-    auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS );
-
-    doAs(request, response, chain, subject);
-  }
-  
-  private void doAs(final ServletRequest request,
-      final ServletResponse response, final FilterChain chain, Subject subject)
-      throws IOException, ServletException {
-    try {
-      Subject.doAs(
-          subject,
-          new PrivilegedExceptionAction<Object>() {
-            public Object run() throws Exception {
-              chain.doFilter(request, response);
-              return null;
-            }
-          }
-          );
-    }
-    catch (PrivilegedActionException e) {
-      Throwable t = e.getCause();
-      if (t instanceof IOException) {
-        throw (IOException) t;
-      }
-      else if (t instanceof ServletException) {
-        throw (ServletException) t;
-      }
-      else {
-        throw new ServletException(t);
-      }
-    }
-  }
-}

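One subtlety in the removed adapter above: Subject.getPrincipals(Class) returns
a typed Set rather than a single Principal, so extracting the primary principal
needs an explicit step. A minimal sketch, generalized to any Principal type
(names illustrative):

    import java.security.Principal;
    import java.util.Iterator;
    import java.util.Set;

    import javax.security.auth.Subject;

    final class PrincipalLookup {
      // Returns the first principal of the requested type, or null if absent.
      static <T extends Principal> T first(Subject subject, Class<T> type) {
        Set<T> principals = subject.getPrincipals(type); // typed Set, possibly empty
        Iterator<T> it = principals.iterator();
        return it.hasNext() ? it.next() : null;
      }
    }
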
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index ec4affc..0000000
--- a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.picketlink.deploy.PicketlinkFederationProviderContributor

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java b/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
deleted file mode 100644
index 0631eeb..0000000
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
+++ /dev/null
@@ -1,30 +0,0 @@
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink;
-
-
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
-
-public class PicketlinkTest extends org.junit.Assert {
-  @Test
-  public void testPicketlink() throws Exception {
-    assertTrue(true);
-  }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index cbff307..ad07225 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -241,10 +241,6 @@
 
         <dependency>
             <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-provider-security-picketlink</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>${gateway-group}</groupId>
             <artifactId>gateway-provider-security-shiro</artifactId>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e314415..30a052b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,7 +68,6 @@
         <module>gateway-provider-identity-assertion-hadoop-groups</module>
         <module>gateway-provider-identity-assertion-regex</module>
         <module>gateway-provider-identity-assertion-switchcase</module>
-        <module>gateway-provider-security-picketlink</module>
         <module>gateway-provider-identity-assertion-pseudo</module>
         <module>gateway-provider-jersey</module>
         <module>gateway-provider-ha</module>
@@ -499,11 +498,6 @@
             </dependency>
             <dependency>
                 <groupId>${gateway-group}</groupId>
-                <artifactId>gateway-provider-security-picketlink</artifactId>
-                <version>${gateway-version}</version>
-            </dependency>
-            <dependency>
-                <groupId>${gateway-group}</groupId>
                 <artifactId>gateway-provider-security-preauth</artifactId>
                 <version>${gateway-version}</version>
             </dependency>
@@ -733,16 +727,6 @@
                 <version>${gateway-version}</version>
             </dependency>
             <dependency>
-                <groupId>org.picketlink</groupId>
-                <artifactId>picketlink-federation</artifactId>
-                <version>2.7.0.CR3</version>
-            </dependency>
-            <dependency>
-                <groupId>org.jboss.logging</groupId>
-                <artifactId>jboss-logging</artifactId>
-                <version>3.2.0.Final</version>
-            </dependency>
-            <dependency>
                 <groupId>org.glassfish.jersey.containers</groupId>
                 <artifactId>jersey-container-servlet</artifactId>
                 <version>2.6</version>


[06/37] knox git commit: KNOX-1046 - Add Client Cert Wanted Capability with Configurable Validation that Checks for It

Posted by lm...@apache.org.
KNOX-1046 - Add Client Cert Wanted Capability with Configurable Validation that Checks for It

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5432c872
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5432c872
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5432c872

Branch: refs/heads/KNOX-1049
Commit: 5432c872271e42d1ba8981e5f5de2059d5509ba2
Parents: 8537d42
Author: Larry McCay <lm...@hortonworks.com>
Authored: Fri Sep 22 13:40:18 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Fri Sep 22 13:40:31 2017 -0400

----------------------------------------------------------------------
 .../hadoop/gateway/config/impl/GatewayConfigImpl.java    | 10 ++++++++++
 .../gateway/services/security/impl/JettySSLService.java  | 11 +++++++++--
 .../org/apache/hadoop/gateway/config/GatewayConfig.java  |  2 ++
 .../org/apache/hadoop/gateway/GatewayTestConfig.java     |  5 +++++
 4 files changed, 26 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 3b7d19e..0956a4a 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -118,6 +118,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
   private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
   private static final String CLIENT_AUTH_NEEDED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.needed";
+  private static final String CLIENT_AUTH_WANTED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.wanted";
   private static final String TRUSTSTORE_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.path";
   private static final String TRUSTSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.type";
   private static final String KEYSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".keystore.type";
@@ -535,6 +536,15 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
   }
 
   /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#isClientAuthWanted()
+   */
+  @Override
+  public boolean isClientAuthWanted() {
+    String clientAuthWanted = get( CLIENT_AUTH_WANTED, "false" );
+    return "true".equals(clientAuthWanted);
+  }
+
+  /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststorePath()
    */
   @Override

http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
index ac4bfa3..52c06d9 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/JettySSLService.java
@@ -63,6 +63,7 @@ public class JettySSLService implements SSLService {
   private String truststorePath;
   private String keystoreType;
   private String trustStoreType;
+  private boolean clientAuthWanted;
 
   public void setMasterService(MasterService ms) {
     this.ms = ms;
@@ -126,6 +127,7 @@ public class JettySSLService implements SSLService {
     sslExcludeCiphers = config.getExcludedSSLCiphers();
     sslExcludeProtocols = config.getExcludedSSLProtocols();
     clientAuthNeeded = config.isClientAuthNeeded();
+    clientAuthWanted = config.isClientAuthWanted();
     truststorePath = config.getTruststorePath();
     trustAllCerts = config.getTrustAllCerts();
     trustStoreType = config.getTruststoreType();
@@ -186,7 +188,7 @@ public class JettySSLService implements SSLService {
     sslContextFactory.setKeyManagerPassword(new String(keypass));
 
     String truststorePassword = null;
-    if (clientAuthNeeded) {
+    if (clientAuthNeeded || clientAuthWanted) {
       if (truststorePath != null) {
         sslContextFactory.setTrustStore(loadKeyStore(keystoreFileName, keystoreType, master));
         char[] truststorePwd = null;
@@ -212,7 +214,12 @@ public class JettySSLService implements SSLService {
         sslContextFactory.setTrustStoreType(keystoreType);
       }
     }
-    sslContextFactory.setNeedClientAuth( clientAuthNeeded );
+    if (clientAuthNeeded) {
+      sslContextFactory.setNeedClientAuth( clientAuthNeeded );
+    }
+    else {
+      sslContextFactory.setWantClientAuth( clientAuthWanted );
+    }
     sslContextFactory.setTrustAll( trustAllCerts );
     if (sslIncludeCiphers != null && !sslIncludeCiphers.isEmpty()) {
       sslContextFactory.setIncludeCipherSuites( sslIncludeCiphers.toArray(new String[sslIncludeCiphers.size()]) );

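The needed/wanted split above maps directly onto Jetty semantics:
setNeedClientAuth(true) fails the TLS handshake when the client presents no
certificate, while setWantClientAuth(true) requests a certificate but lets the
connection proceed, leaving it to later, configurable validation to check for
one. A minimal sketch against the Jetty 9 SslContextFactory (keystore path
illustrative):

    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class ClientAuthSketch {
      public static SslContextFactory configure(boolean needed, boolean wanted) {
        SslContextFactory ssl = new SslContextFactory();
        ssl.setKeyStorePath("/var/knox/data/security/keystores/gateway.jks"); // illustrative
        if (needed) {
          ssl.setNeedClientAuth(true);   // handshake rejects cert-less clients
        } else {
          ssl.setWantClientAuth(wanted); // cert requested, connection still allowed
        }
        return ssl;
      }
    }
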
http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index 506c31e..66fb83c 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -108,6 +108,8 @@ public interface GatewayConfig {
 
   boolean isClientAuthNeeded();
 
+  boolean isClientAuthWanted();
+
   String getTruststorePath();
 
   boolean getTrustAllCerts();

http://git-wip-us.apache.org/repos/asf/knox/blob/5432c872/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 09b0d94..ff9a877 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -609,4 +609,9 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
   public boolean isGatewayServerHeaderEnabled() {
 	return false;
   }
+
+  @Override
+  public boolean isClientAuthWanted() {
+    return false;
+  }
 }


[16/37] knox git commit: KNOX-1071 - Replace java.util.Random with SecureRandom

Posted by lm...@apache.org.
KNOX-1071 - Replace java.util.Random with SecureRandom


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/145ed5dd
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/145ed5dd
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/145ed5dd

Branch: refs/heads/KNOX-1049
Commit: 145ed5dd673e89db278ad5b7257690e3b23e3109
Parents: 10b3473
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Thu Sep 28 12:39:00 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Sep 28 12:39:00 2017 +0100

----------------------------------------------------------------------
 .../impl/DefaultServiceRegistryService.java     | 50 ++++++++++----------
 .../security/impl/DefaultAliasService.java      | 12 ++---
 2 files changed, 31 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/145ed5dd/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
index c590f0d..ec08597 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
@@ -35,14 +35,14 @@ import org.apache.hadoop.gateway.services.security.CryptoService;
 
 import java.io.File;
 import java.io.IOException;
+import java.security.SecureRandom;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 
 public class DefaultServiceRegistryService implements ServiceRegistry, Service {
   private static GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
-  
+
   protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
   'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
   'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
@@ -53,31 +53,31 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
   private Registry registry = new Registry();
 
   private String registryFileName;
-  
+
   public DefaultServiceRegistryService() {
   }
-  
+
   public void setCryptoService(CryptoService crypto) {
     this.crypto = crypto;
   }
-  
+
   public String getRegistrationCode(String clusterName) {
     String code = generateRegCode(16);
     byte[] signature = crypto.sign("SHA256withRSA","gateway-identity",code);
     String encodedSig = Base64.encodeBase64URLSafeString(signature);
-    
+
     return code + "::" + encodedSig;
   }
-  
+
   private String generateRegCode(int length) {
-    StringBuffer sb = new StringBuffer();
-    Random r = new Random();
+    StringBuilder sb = new StringBuilder();
+    SecureRandom r = new SecureRandom();
     for (int i = 0; i < length; i++) {
       sb.append(chars[r.nextInt(chars.length)]);
     }
     return sb.toString();
   }
-  
+
   public void removeClusterServices(String clusterName) {
     registry.remove(clusterName);
   }
@@ -89,7 +89,7 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
       throw new IllegalArgumentException("Registration Code must not be null.");
     }
     String[] parts = regCode.split("::");
-    
+
     // part one is the code and part two is the signature
     boolean verified = crypto.verify("SHA256withRSA", "gateway-identity", parts[0], Base64.decodeBase64(parts[1]));
     if (verified) {
@@ -114,24 +114,24 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
         e.printStackTrace(); //TODO: I18N
       }
     }
-    
+
     return rc;
   }
-  
+
   private String renderAsJsonString(HashMap<String,HashMap<String,RegEntry>> registry) {
     String json = null;
     ObjectMapper mapper = new ObjectMapper();
-    
+
     try {
       // write JSON to a file
       json = mapper.writeValueAsString((Object)registry);
-    
+
     } catch ( JsonProcessingException e ) {
       e.printStackTrace(); //TODO: I18N
     }
     return json;
   }
-  
+
   @Override
   public String lookupServiceURL(String clusterName, String serviceName) {
     List<String> urls = lookupServiceURLs( clusterName, serviceName );
@@ -144,22 +144,22 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
   @Override
   public List<String> lookupServiceURLs( String clusterName, String serviceName ) {
     RegEntry entry = null;
-    HashMap clusterServices = registry.get(clusterName);
+    HashMap<String, RegEntry> clusterServices = registry.get(clusterName);
     if (clusterServices != null) {
-      entry = (RegEntry) clusterServices.get(serviceName);
+      entry = clusterServices.get(serviceName);
       if( entry != null ) {
         return entry.getUrls();
       }
     }
     return null;
   }
-  
+
   private HashMap<String, HashMap<String,RegEntry>> getMapFromJsonString(String json) {
     Registry map = null;
-    JsonFactory factory = new JsonFactory(); 
-    ObjectMapper mapper = new ObjectMapper(factory); 
-    TypeReference<Registry> typeRef 
-          = new TypeReference<Registry>() {}; 
+    JsonFactory factory = new JsonFactory();
+    ObjectMapper mapper = new ObjectMapper(factory);
+    TypeReference<Registry> typeRef
+          = new TypeReference<Registry>() {};
     try {
       map = mapper.readValue(json, typeRef);
     } catch (JsonParseException e) {
@@ -168,9 +168,9 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
       LOG.failedToGetMapFromJsonString( json, e );
     } catch (IOException e) {
       LOG.failedToGetMapFromJsonString( json, e );
-    } 
+    }
     return map;
-  }   
+  }
 
   @Override
   public void init(GatewayConfig config, Map<String, String> options)

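For context, the registration code scheme above is sign-then-append: a random
code is signed with the gateway identity key and the URL-safe Base64 signature
rides after a "::" separator, which verifyRegistrationCode later splits and
checks. A self-contained sketch of that shape using plain java.security, with
key loading elided (the real code goes through CryptoService and the keystore
alias, and uses commons-codec for Base64):

    import java.nio.charset.StandardCharsets;
    import java.security.GeneralSecurityException;
    import java.security.PrivateKey;
    import java.security.PublicKey;
    import java.security.Signature;
    import java.util.Base64;

    public class RegCodeSignatureSketch {
      // Produces: code + "::" + urlsafe-base64(SHA256withRSA signature of code)
      public static String sign(PrivateKey key, String code) throws GeneralSecurityException {
        Signature sig = Signature.getInstance("SHA256withRSA");
        sig.initSign(key);
        sig.update(code.getBytes(StandardCharsets.UTF_8));
        return code + "::" + Base64.getUrlEncoder().withoutPadding().encodeToString(sig.sign());
      }

      public static boolean verify(PublicKey key, String regCode) throws GeneralSecurityException {
        String[] parts = regCode.split("::");
        Signature sig = Signature.getInstance("SHA256withRSA");
        sig.initVerify(key);
        sig.update(parts[0].getBytes(StandardCharsets.UTF_8));
        return sig.verify(Base64.getUrlDecoder().decode(parts[1]));
      }
    }
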
http://git-wip-us.apache.org/repos/asf/knox/blob/145ed5dd/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
index 8cf31a5..4a3e754 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.gateway.services.security.impl;
 
 import java.security.KeyStore;
 import java.security.KeyStoreException;
+import java.security.SecureRandom;
 import java.security.cert.Certificate;
 import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.List;
 import java.util.Map;
-import java.util.Random;
 
 import org.apache.hadoop.gateway.GatewayMessages;
 import org.apache.hadoop.gateway.config.GatewayConfig;
@@ -39,7 +39,7 @@ import org.apache.hadoop.gateway.services.security.MasterService;
 public class DefaultAliasService implements AliasService {
   private static final GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
 
-  private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase"; 
+  private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
 
   protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
   'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
@@ -104,21 +104,21 @@ public class DefaultAliasService implements AliasService {
   }
 
   private String generatePassword(int length) {
-    StringBuffer sb = new StringBuffer();
-    Random r = new Random();
+    StringBuilder sb = new StringBuilder();
+    SecureRandom r = new SecureRandom();
     for (int i = 0; i < length; i++) {
       sb.append(chars[r.nextInt(chars.length)]);
     }
     return sb.toString();
   }
-  
+
   public void setKeystoreService(KeystoreService ks) {
     this.keystoreService = ks;
   }
 
   public void setMasterService(MasterService ms) {
     this.masterService = ms;
-    
+
   }
 
   @Override

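The motivation for the change: java.util.Random is a 48-bit linear congruential
generator whose future output can be reconstructed from a few observed values,
so it is unsuitable for generated passwords and registration codes, while
SecureRandom draws from a cryptographically strong source. A sketch of the
pattern both patched methods now share (the alphabet here is illustrative and
shorter than the mixed-case one in the patch):

    import java.security.SecureRandom;

    public class RandomCodeSketch {
      // Ambiguous glyphs such as i, l, o, 0 and 1 are omitted for readability.
      private static final char[] CHARS = "abcdefghjkmnpqrstuvwxyz23456789".toCharArray();
      private static final SecureRandom RND = new SecureRandom(); // CSPRNG, self-seeding

      public static String generate(int length) {
        StringBuilder sb = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
          sb.append(CHARS[RND.nextInt(CHARS.length)]);
        }
        return sb.toString();
      }
    }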

[09/37] knox git commit: KNOX-1014 - Service Discovery and Topology Generation Framework (Phil Zampino via lmccay)

Posted by lm...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
new file mode 100644
index 0000000..1e5e7b2
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -0,0 +1,856 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ *      treatment of the responses as they were observed at the time the tests were developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+    @Test
+    public void testSingleClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "testCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster);
+    }
+
+
+    @Test
+    public void testBulkClusterDiscovery() throws Exception {
+        final String discoveryAddress = "http://ambarihost:8080";
+        final String clusterName = "anotherCluster";
+        ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+        ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+        EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+        EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+        EasyMock.replay(sdc);
+
+        Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+        assertNotNull(clusters);
+        assertEquals(1, clusters.size());
+        ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+        assertNotNull(cluster);
+        assertEquals(clusterName, cluster.getName());
+        assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+        assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+//        printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+        final String[] services = new String[]{"NAMENODE",
+                                               "JOBTRACKER",
+                                               "WEBHDFS",
+                                               "WEBHCAT",
+                                               "OOZIE",
+                                               "WEBHBASE",
+                                               "HIVE",
+                                               "RESOURCEMANAGER"};
+        printServiceURLs(cluster, services);
+    }
+
+
+    private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+        for (String name : services) {
+            StringBuilder sb = new StringBuilder();
+            List<String> urls = cluster.getServiceURLs(name);
+            if (urls != null && !urls.isEmpty()) {
+                for (String url : urls) {
+                    sb.append(url);
+                    sb.append(" ");
+                }
+            }
+            System.out.println(String.format("%18s: %s", name, sb.toString()));
+        }
+    }
+
+
+    /**
+     * ServiceDiscovery implementation derived from AmbariServiceDiscovery, overriding the invokeREST method to
+     * return canned responses and thereby eliminate the need for actual HTTP interactions with a real Ambari endpoint.
+     */
+    private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+        final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+        private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+        TestAmbariServiceDiscovery(String clusterName) {
+            cannedResponses.put(AmbariServiceDiscovery.AMBARI_CLUSTERS_URI,
+                                (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                               clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_HOSTROLES_URI, clusterName),
+                                (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                clusterName)));
+
+            cannedResponses.put(String.format(AmbariServiceDiscovery.AMBARI_SERVICECONFIGS_URI, clusterName),
+                                (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+                                                                                                     clusterName)));
+        }
+
+        @Override
+        protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+            return cannedResponses.get(url.substring(url.indexOf("/api")));
+        }
+    }
+
+
+    ////////////////////////////////////////////////////////////////////////
+    //  JSON response templates, based on actual response content excerpts
+    ////////////////////////////////////////////////////////////////////////
+
+    private static final String CLUSTERS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"Clusters\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"version\" : \"HDP-2.6\"\n" +
+    "      }\n" +
+    "    }\n" +
+    "  ]" +
+    "}";
+
+
+    private static final String HOSTROLES_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "            \"service_name\" : \"AMBARI_METRICS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"AMBARI_METRICS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HBASE_MASTER\",\n" +
+    "            \"service_name\" : \"HBASE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HBASE_MASTER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HBASE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HDFS\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NAMENODE\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "            \"service_name\" : \"HDFS\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HDFS\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"HIVE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HCAT\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HCAT\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_METASTORE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"HIVE_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"HIVE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "            \"service_name\" : \"HIVE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"HIVE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\",\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"OOZIE\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "            \"service_name\" : \"OOZIE\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"OOZIE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"OOZIE\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"YARN\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"NODEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"NODEMANAGER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "            \"service_name\" : \"YARN\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"RESOURCEMANAGER\",\n" +
+    "                \"ha_state\" : \"ACTIVE\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"YARN\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+    "      \"ServiceInfo\" : {\n" +
+    "        \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "        \"service_name\" : \"ZOOKEEPER\"\n" +
+    "      },\n" +
+    "      \"components\" : [\n" +
+    "        {\n" +
+    "          \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+    "          \"ServiceComponentInfo\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "            \"service_name\" : \"ZOOKEEPER\"\n" +
+    "          },\n" +
+    "          \"host_components\" : [\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            },\n" +
+    "            {\n" +
+    "              \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+    "              \"HostRoles\" : {\n" +
+    "                \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "                \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+    "                \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+    "                \"service_name\" : \"ZOOKEEPER\",\n" +
+    "                \"stack_id\" : \"HDP-2.6\"\n" +
+    "              }\n" +
+    "            }\n" +
+    "          ]\n" +
+    "        }\n" +
+    "      ]\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}\n";
+
+
+    private static final String SERVICECONFIGS_JSON_TEMPLATE =
+    "{\n" +
+    "  \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+    "  \"items\" : [\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hbase-site\",\n" +
+    "          \"tag\" : \"version1503410563715\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+    "            \"hbase.master.info.port\" : \"16010\",\n" +
+    "            \"hbase.master.port\" : \"16000\",\n" +
+    "            \"hbase.regionserver.info.port\" : \"16030\",\n" +
+    "            \"hbase.regionserver.port\" : \"16020\",\n" +
+    "            \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+    "            \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+    "            \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+    "            \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+    "      \"service_name\" : \"HBASE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hdfs-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+    "            \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+    "            \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+    "            \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+    "            \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+    "            \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"dfs.https.port\" : \"50470\",\n" +
+    "            \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+    "            \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+    "            \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+    "            \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+    "            \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+    "            \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+    "            \"dfs.webhdfs.enabled\" : \"true\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"dfs.webhdfs.enabled\" : \"true\",\n" +
+    "              \"dfs.namenode.http-address\" : \"true\",\n" +
+    "              \"dfs.support.append\" : \"true\",\n" +
+    "              \"dfs.namenode.name.dir\" : \"true\",\n" +
+    "              \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+    "              \"dfs.datanode.data.dir\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"core-site\",\n" +
+    "          \"tag\" : \"version1502131215159\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+    "            \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"final\" : {\n" +
+    "              \"fs.defaultFS\" : \"true\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 2,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HDFS\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-env\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive_security_authorization\" : \"None\",\n" +
+    "            \"webhcat_user\" : \"hcat\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hiveserver2-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+    "            \"hive.security.authorization.enabled\" : \"false\",\n" +
+    "            \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+    "            \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.server2.enable.doAs\" : \"false\",\n" +
+    "            \"hive.server2.tez.default.queues\" : \"default\",\n" +
+    "            \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+    "            \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+    "            \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+    "            \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10500\",\n" +
+    "            \"hive.server2.webui.port\" : \"10502\",\n" +
+    "            \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-interactive-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+    "            \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+    "            \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"hive-site\",\n" +
+    "          \"tag\" : \"version1502130841736\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+    "            \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+    "            \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+    "            \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+    "            \"hive.server2.authentication\" : \"NONE\",\n" +
+    "            \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+    "            \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+    "            \"hive.server2.enable.doAs\" : \"true\",\n" +
+    "            \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+    "            \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+    "            \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+    "            \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+    "            \"hive.server2.thrift.port\" : \"10000\",\n" +
+    "            \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+    "            \"hive.server2.transport.mode\" : \"http\",\n" +
+    "            \"hive.server2.use.SSL\" : \"false\",\n" +
+    "            \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : {\n" +
+    "            \"hidden\" : {\n" +
+    "              \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+    "            }\n" +
+    "          }\n" +
+    "        },\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"webhcat-site\",\n" +
+    "          \"tag\" : \"version1502131111746\",\n" +
+    "          \"version\" : 2,\n" +
+    "          \"properties\" : {\n" +
+    "            \"templeton.port\" : \"50111\",\n" +
+    "            \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+    "            \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+    "            \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502131110745,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+    "      \"service_name\" : \"HIVE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"oozie-site\",\n" +
+    "          \"tag\" : \"version1502131137103\",\n" +
+    "          \"version\" : 3,\n" +
+    "          \"properties\" : {\n" +
+    "            \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\",\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 3,\n" +
+    "      \"service_name\" : \"OOZIE\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"tez-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"createtime\" : 1502122253525,\n" +
+    "      \"group_id\" : -1,\n" +
+    "      \"group_name\" : \"Default\",\n" +
+    "      \"hosts\" : [ ],\n" +
+    "      \"is_cluster_compatible\" : true,\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+    "      \"service_name\" : \"TEZ\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    },\n" +
+    "    {\n" +
+    "      \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+    "      \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "      \"configurations\" : [\n" +
+    "        {\n" +
+    "          \"Config\" : {\n" +
+    "            \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+    "            \"stack_id\" : \"HDP-2.6\"\n" +
+    "          },\n" +
+    "          \"type\" : \"yarn-site\",\n" +
+    "          \"tag\" : \"version1\",\n" +
+    "          \"version\" : 1,\n" +
+    "          \"properties\" : {\n" +
+    "            \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+    "            \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+    "            \"yarn.acl.enable\" : \"false\",\n" +
+    "            \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+    "            \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+    "            \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+    "            \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+    "            \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+    "            \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+    "            \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+    "            \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+    "            \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+    "            \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+    "            \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+    "            \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+    "          },\n" +
+    "          \"properties_attributes\" : { }\n" +
+    "        }\n" +
+    "      ],\n" +
+    "      \"is_current\" : true,\n" +
+    "      \"service_config_version\" : 1,\n" +
+    "      \"service_name\" : \"YARN\",\n" +
+    "      \"stack_id\" : \"HDP-2.6\",\n" +
+    "      \"user\" : \"admin\"\n" +
+    "    }\n" +
+    "  ]\n" +
+    "}";
+
+}
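
Since TestAmbariServiceDiscovery answers invokeREST from canned responses, the tests above run without a live Ambari instance. Assuming the standard Maven Surefire setup used across the Knox modules (module name taken from the diff above), a single-class run would look something like:

    mvn test -pl gateway-discovery-ambari -Dtest=AmbariServiceDiscoveryTest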

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/descriptors/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/descriptors/README b/gateway-release/home/conf/descriptors/README
new file mode 100644
index 0000000..a2e5226
--- /dev/null
+++ b/gateway-release/home/conf/descriptors/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SIMPLE TOPOLOGY DESCRIPTORS CAN BE PLACED
\ No newline at end of file
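
For illustration only, a minimal sketch of a simple descriptor that might be placed in this directory, assuming the YAML form handled by the DescriptorsMonitor/SimpleDescriptorHandler introduced in this change set; the field names are assumptions, not confirmed by this diff (the host and cluster names are reused from the test above):

    discovery-type: AMBARI
    discovery-address: http://ambarihost:8080
    provider-config-ref: default-providers
    cluster: testCluster
    services:
      - name: NAMENODE
      - name: WEBHDFS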

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/home/conf/shared-providers/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/shared-providers/README b/gateway-release/home/conf/shared-providers/README
new file mode 100644
index 0000000..44d12a3
--- /dev/null
+++ b/gateway-release/home/conf/shared-providers/README
@@ -0,0 +1 @@
+THIS DIRECTORY IS WHERE SHARED PROVIDER CONFIGURATIONS CAN BE PLACED
\ No newline at end of file
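
For illustration only, a sketch of a shared provider configuration that might be placed in this directory, assuming it follows the <gateway>/<provider> structure used in Knox topology files (the exact file format is not shown in this diff, and the provider shown is just an example):

    <gateway>
        <provider>
            <role>authentication</role>
            <name>ShiroProvider</name>
            <enabled>true</enabled>
        </provider>
    </gateway>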

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 83824cd..cbff307 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -334,6 +334,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-shell-samples</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-discovery-ambari</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 437d22d..5ebf793 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -248,6 +248,11 @@
            <artifactId>javax-websocket-client-impl</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>com.fasterxml.jackson.dataformat</groupId>
+            <artifactId>jackson-dataformat-yaml</artifactId>
+            <version>2.3.0</version>
+        </dependency>
 
         <!-- ********** ********** ********** ********** ********** ********** -->
         <!-- ********** Test Dependencies                           ********** -->

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 1f94584..6f73c1e 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -511,6 +511,11 @@ public interface GatewayMessages {
                " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
                    + "This invalid topology mapping will be ignored by the gateway. "
                    + "Gateway restart will be required if in the future \"{0}\" topology is added.")
-  void topologyPortMappingCannotFindTopology(final String topology,
-      final int port);
+  void topologyPortMappingCannotFindTopology(final String topology, final int port);
+
+
+  @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
+  void simpleDescriptorHandlingError(final String simpleDesc,
+                                     @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
index cefada1..02ac154 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/DefaultGatewayServices.java
@@ -105,14 +105,13 @@ public class DefaultGatewayServices implements GatewayServices {
     services.put( SERVER_INFO_SERVICE, sis );
 
     DefaultTopologyService tops = new DefaultTopologyService();
+    tops.setAliasService(alias);
     tops.init(  config, options  );
     services.put(  TOPOLOGY_SERVICE, tops  );
 
     DefaultServiceDefinitionRegistry sdr = new DefaultServiceDefinitionRegistry();
     sdr.init( config, options );
     services.put( SERVICE_DEFINITION_REGISTRY, sdr );
-    tops.init( config, options );
-    services.put( TOPOLOGY_SERVICE, tops );
 
     DefaultMetricsService metricsService = new DefaultMetricsService();
     metricsService.init( config, options );

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 3321f3d..a493bc4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
 import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.topology.TopologyService;
 import org.apache.hadoop.gateway.topology.Topology;
 import org.apache.hadoop.gateway.topology.TopologyEvent;
@@ -45,6 +46,7 @@ import org.apache.hadoop.gateway.topology.TopologyListener;
 import org.apache.hadoop.gateway.topology.TopologyMonitor;
 import org.apache.hadoop.gateway.topology.TopologyProvider;
 import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorHandler;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.gateway.topology.xml.AmbariFormatXmlTopologyRules;
 import org.apache.hadoop.gateway.topology.xml.KnoxFormatXmlTopologyRules;
@@ -74,20 +76,27 @@ import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
 public class DefaultTopologyService
     extends FileAlterationListenerAdaptor
     implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
+
   private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
     AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
     AuditConstants.KNOX_COMPONENT_NAME);
+
   private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
   static {
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
     SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
   }
+
   private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
   private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
-  private FileAlterationMonitor monitor;
-  private File directory;
+  private List<FileAlterationMonitor> monitors = new ArrayList<>();
+  private File topologiesDirectory;
+  private File descriptorsDirectory;
+
   private Set<TopologyListener> listeners;
   private volatile Map<File, Topology> topologies;
+  private AliasService aliasService;
+
 
   private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
     final long TIMEOUT = 250; //ms
@@ -202,29 +211,40 @@ public class DefaultTopologyService
   }
 
   private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
-
-    File topoDir = new File(config.getGatewayTopologyDir());
+    String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
+    File topoDir = new File(normalizedTopologyDir);
     topoDir = topoDir.getAbsoluteFile();
     return topoDir;
   }
 
-  private void initListener(FileAlterationMonitor monitor, File directory) {
-    this.directory = directory;
-    this.monitor = monitor;
+  private File calculateAbsoluteConfigDir(GatewayConfig config) {
+    File configDir = null;
 
+    String path = FilenameUtils.normalize(config.getGatewayConfDir());
+    if (path != null) {
+      configDir = new File(path);
+    } else {
+      configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
+    }
+    configDir = configDir.getAbsoluteFile();
 
-    FileAlterationObserver observer = new FileAlterationObserver(this.directory, this);
-    observer.addListener(this);
-    monitor.addObserver(observer);
+    return configDir;
+  }
 
-    this.listeners = new HashSet<>();
-    this.topologies = new HashMap<>(); //loadTopologies( this.directory );
+  private void initListener(FileAlterationMonitor  monitor,
+                            File                   directory,
+                            FileFilter             filter,
+                            FileAlterationListener listener) {
+    monitors.add(monitor);
+    FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
+    observer.addListener(listener);
+    monitor.addObserver(observer);
   }
 
-  private void initListener(File directory) throws IOException, SAXException {
+  private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
     // Increasing the monitoring interval to 5 seconds as profiling has shown
     // this is rather expensive in terms of generated garbage objects.
-    initListener(new FileAlterationMonitor(5000L), directory);
+    initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
   }
 
   private Map<File, Topology> loadTopologies(File directory) {
@@ -261,10 +281,14 @@ public class DefaultTopologyService
     return map;
   }
 
+  public void setAliasService(AliasService as) {
+    this.aliasService = as;
+  }
+
   public void deployTopology(Topology t){
 
     try {
-      File temp = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
+      File temp = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
       Package topologyPkg = Topology.class.getPackage();
       String pkgName = topologyPkg.getName();
       String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
@@ -277,7 +301,7 @@ public class DefaultTopologyService
       mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
       mr.marshal(t, temp);
 
-      File topology = new File(directory.getAbsolutePath() + "/" + t.getName() + ".xml");
+      File topology = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml");
       if(!temp.renameTo(topology)) {
         FileUtils.forceDelete(temp);
         throw new IOException("Could not rename temp file");
@@ -317,7 +341,7 @@ public class DefaultTopologyService
     try {
       synchronized (this) {
         Map<File, Topology> oldTopologies = topologies;
-        Map<File, Topology> newTopologies = loadTopologies(directory);
+        Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
         List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
         topologies = newTopologies;
         notifyChangeListeners(events);
@@ -329,7 +353,7 @@ public class DefaultTopologyService
   }
 
   public void deleteTopology(Topology t) {
-    File topoDir = directory;
+    File topoDir = topologiesDirectory;
 
     if(topoDir.isDirectory() && topoDir.canRead()) {
       File[] results = topoDir.listFiles();
@@ -357,8 +381,8 @@ public class DefaultTopologyService
   public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
     File tFile = null;
     Map<String, List<String>> urls = new HashMap<>();
-    if(directory.isDirectory() && directory.canRead()) {
-      for(File f : directory.listFiles()){
+    if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
+      for(File f : topologiesDirectory.listFiles()){
         if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
           tFile = f;
         }
@@ -387,12 +411,16 @@ public class DefaultTopologyService
 
   @Override
   public void startMonitor() throws Exception {
-    monitor.start();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.start();
+    }
   }
 
   @Override
   public void stopMonitor() throws Exception {
-    monitor.stop();
+    for (FileAlterationMonitor monitor : monitors) {
+      monitor.stop();
+    }
   }
 
   @Override
@@ -414,6 +442,16 @@ public class DefaultTopologyService
 
   @Override
   public void onFileDelete(java.io.File file) {
+    // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
+    // unintended subsequent generation of the topology descriptor
+    for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
+      File simpleDesc =
+              new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+      if (simpleDesc.exists()) {
+        simpleDesc.delete();
+      }
+    }
+
     onFileChange(file);
   }
 
@@ -436,12 +474,200 @@ public class DefaultTopologyService
   public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
 
     try {
-      initListener(calculateAbsoluteTopologiesDir(config));
-    } catch (IOException io) {
+      listeners = new HashSet<>();
+      topologies = new HashMap<>();
+
+      topologiesDirectory = calculateAbsoluteTopologiesDir(config);
+
+      File configDirectory = calculateAbsoluteConfigDir(config);
+      descriptorsDirectory = new File(configDirectory, "descriptors");
+      File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
+
+      // Add support for conf/topologies
+      initListener(topologiesDirectory, this, this);
+
+      // Add support for conf/descriptors
+      DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
+      initListener(descriptorsDirectory,
+                   dm,
+                   dm);
+
+      // Add support for conf/shared-providers
+      SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
+      initListener(sharedProvidersDirectory, spm, spm);
+
+    } catch (IOException | SAXException io) {
       throw new ServiceLifecycleException(io.getMessage());
-    } catch (SAXException sax) {
-      throw new ServiceLifecycleException(sax.getMessage());
+    }
+  }
+
+
+  /**
+   * Change handler for simple descriptors
+   */
+  public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
+                                          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
+    static {
+      SUPPORTED_EXTENSIONS.add("json");
+      SUPPORTED_EXTENSIONS.add("yml");
+    }
+
+    private File topologiesDir;
+
+    private AliasService aliasService;
+
+    private Map<String, List<String>> providerConfigReferences = new HashMap<>();
+
+
+    public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
+      this.topologiesDir  = topologiesDir;
+      this.aliasService   = aliasService;
+    }
+
+    List<String> getReferencingDescriptors(String providerConfigPath) {
+      List<String> result = providerConfigReferences.get(providerConfigPath);
+      if (result == null) {
+        result = Collections.emptyList();
+      }
+      return result;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
+      for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
+        File topologyFile =
+                new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+        if (topologyFile.exists()) {
+          topologyFile.delete();
+        }
+      }
+
+      String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
+      String reference = null;
+      for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
+        if (entry.getValue().contains(normalizedFilePath)) {
+          reference = entry.getKey();
+          break;
+        }
+      }
+      if (reference != null) {
+        providerConfigReferences.get(reference).remove(normalizedFilePath);
+      }
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      try {
+        // When a simple descriptor has been created or modified, generate the new topology descriptor
+        Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
+
+        // Add the provider config reference relationship for handling updates to the provider config
+        String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
+        if (!providerConfigReferences.containsKey(providerConfig)) {
+          providerConfigReferences.put(providerConfig, new ArrayList<String>());
+        }
+        List<String> refs = providerConfigReferences.get(providerConfig);
+        String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
+        if (!refs.contains(descriptorName)) {
+          // Need to check if descriptor had previously referenced another provider config, so it can be removed
+          for (List<String> descs : providerConfigReferences.values()) {
+            if (descs.contains(descriptorName)) {
+              descs.remove(descriptorName);
+            }
+          }
+
+          // Add the current reference relationship
+          refs.add(descriptorName);
+        }
+      } catch (Exception e) {
+        log.simpleDescriptorHandlingError(file.getName(), e);
+      }
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
+  }
+
+  /**
+   * Change handler for shared provider configurations
+   */
+  public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
+          implements FileFilter {
+
+    static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
+    static {
+      SUPPORTED_EXTENSIONS.add("xml");
     }
 
+    private DescriptorsMonitor descriptorsMonitor;
+    private File descriptorsDir;
+
+
+    SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
+      this.descriptorsMonitor = descMonitor;
+      this.descriptorsDir     = descriptorsDir;
+    }
+
+    @Override
+    public void onFileCreate(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileDelete(File file) {
+      onFileChange(file);
+    }
+
+    @Override
+    public void onFileChange(File file) {
+      // For shared provider configuration, we need to update any simple descriptors that reference it
+      for (File descriptor : getReferencingDescriptors(file)) {
+        descriptor.setLastModified(System.currentTimeMillis());
+      }
+    }
+
+    private List<File> getReferencingDescriptors(File sharedProviderConfig) {
+      List<File> references = new ArrayList<>();
+
+      for (File descriptor : descriptorsDir.listFiles()) {
+        if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
+          for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
+            references.add(new File(reference));
+          }
+        }
+      }
+
+      return references;
+    }
+
+    @Override
+    public boolean accept(File file) {
+      boolean accept = false;
+      if (!file.isDirectory() && file.canRead()) {
+        String extension = FilenameUtils.getExtension(file.getName());
+        if (SUPPORTED_EXTENSIONS.contains(extension)) {
+          accept = true;
+        }
+      }
+      return accept;
+    }
   }
+
 }
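
For readers unfamiliar with the Apache Commons IO monitoring API that the refactored initListener() builds on, here is a minimal, self-contained sketch of the observer/listener wiring; the watched directory and filter are illustrative assumptions, not values from this patch.

    import java.io.File;
    import java.io.FileFilter;
    import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
    import org.apache.commons.io.monitor.FileAlterationMonitor;
    import org.apache.commons.io.monitor.FileAlterationObserver;

    public class MonitorWiringSketch {
      public static void main(String[] args) throws Exception {
        File watched = new File("/tmp/descriptors");               // hypothetical directory
        FileFilter jsonOnly = f -> f.getName().endsWith(".json");  // illustrative filter

        // The observer scans the directory; the listener reacts to create/change/delete events.
        FileAlterationObserver observer = new FileAlterationObserver(watched, jsonOnly);
        observer.addListener(new FileAlterationListenerAdaptor() {
          @Override
          public void onFileChange(File file) {
            System.out.println("changed: " + file);
          }
        });

        // Each monitor polls its observers on a shared interval (5000ms, as in the patch).
        FileAlterationMonitor monitor = new FileAlterationMonitor(5000L);
        monitor.addObserver(observer);
        monitor.start();
        // ... later, monitor.stop(), as stopMonitor() does for each monitor in the list.
      }
    }

One monitor is created per watched directory and collected in the monitors list, which is why startMonitor() and stopMonitor() above now iterate rather than delegate to a single field.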

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b5e80d2..a30cf13 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@ -77,7 +77,7 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
         Topology topology = new Topology();
         topology.setName(name);
 
-          for (Provider provider : providers) {
+        for (Provider provider : providers) {
             topology.addProvider(provider);
         }
 

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
new file mode 100644
index 0000000..6534b5e
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/DefaultServiceDiscoveryConfig.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+public class DefaultServiceDiscoveryConfig implements ServiceDiscoveryConfig {
+    private String address  = null;
+    private String user     = null;
+    private String pwdAlias = null;
+
+    public DefaultServiceDiscoveryConfig(String address) {
+        this.address = address;
+    }
+
+    public void setUser(String username) {
+        this.user = username;
+    }
+
+    public void setPasswordAlias(String alias) {
+        this.pwdAlias = alias;
+    }
+
+    public String getAddress() {
+        return address;
+    }
+
+    public String getUser() {
+        return user;
+    }
+
+    public String getPasswordAlias() {
+        return pwdAlias;
+    }
+
+}
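
A brief usage sketch for the new configuration holder; the address, user, and alias values are hypothetical, though the alias name matches the default used by the Ambari discovery code later in this thread.

    import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;

    public class DiscoveryConfigSketch {
      public static void main(String[] args) {
        // Address, user, and alias are illustrative values, not taken from the commit.
        DefaultServiceDiscoveryConfig config =
            new DefaultServiceDiscoveryConfig("http://ambari.example.com:8080");
        config.setUser("discovery-user");
        config.setPasswordAlias("ambari.discovery.password");

        // Consumers read the settings through the ServiceDiscoveryConfig interface.
        ServiceDiscoveryConfig sdc = config;
        System.out.println(sdc.getAddress() + " (user: " + sdc.getUser() + ")");
      }
    }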

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
new file mode 100644
index 0000000..70d5f61
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/discovery/ServiceDiscoveryFactory.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery;
+
+import org.apache.hadoop.gateway.services.Service;
+
+import java.lang.reflect.Field;
+import java.util.ServiceLoader;
+
+/**
+ * Creates instances of ServiceDiscovery implementations.
+ *
+ * This factory uses the ServiceLoader mechanism to load ServiceDiscovery implementations as extensions.
+ *
+ */
+public abstract class ServiceDiscoveryFactory {
+
+    private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+
+    public static ServiceDiscovery get(String type) {
+        return get(type, NO_GATEWAY_SERVICES);
+    }
+
+
+    public static ServiceDiscovery get(String type, Service...gatewayServices) {
+        ServiceDiscovery sd  = null;
+
+        // Look up the available ServiceDiscovery types
+        ServiceLoader<ServiceDiscoveryType> loader = ServiceLoader.load(ServiceDiscoveryType.class);
+        for (ServiceDiscoveryType sdt : loader) {
+            if (sdt.getType().equalsIgnoreCase(type)) {
+                try {
+                    ServiceDiscovery instance = sdt.newInstance();
+                    // Make sure the type reported by the instance matches the type declared by the factory
+                    // (is this necessary?)
+                    if (instance.getType().equalsIgnoreCase(type)) {
+                        sd = instance;
+
+                        // Inject any gateway services that were specified, and which are referenced in the impl
+                        if (gatewayServices != null && gatewayServices.length > 0) {
+                            for (Field field : sd.getClass().getDeclaredFields()) {
+                                if (field.getDeclaredAnnotation(GatewayService.class) != null) {
+                                    for (Service s : gatewayServices) {
+                                        if (s != null) {
+                                            if (field.getType().isAssignableFrom(s.getClass())) {
+                                                field.setAccessible(true);
+                                                field.set(sd, s);
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                        }
+                        break;
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+        }
+
+        return sd;
+    }
+
+
+}
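
Implementations are discovered through java.util.ServiceLoader, so each extension is expected to register its ServiceDiscoveryType in a provider-configuration file under META-INF/services (the Ambari module's file listing later in this thread shows such a file). A minimal usage sketch, assuming the Ambari implementation is on the classpath; the null alias service is a stand-in for a gateway-injected instance and is skipped by the injection loop above.

    import org.apache.hadoop.gateway.services.security.AliasService;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
    import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;

    public class FactorySketch {
      public static void main(String[] args) {
        // An AliasService would normally come from the gateway's service registry;
        // null is a placeholder here and is ignored by the field-injection loop.
        AliasService aliasService = null;

        // "AMBARI" matches the TYPE constant of the Ambari implementation.
        ServiceDiscovery discovery = ServiceDiscoveryFactory.get("AMBARI", aliasService);
        if (discovery != null) {
          System.out.println("Loaded discovery type: " + discovery.getType());
        }
      }
    }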

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
new file mode 100644
index 0000000..aa28469
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptor.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import java.util.List;
+
+public interface SimpleDescriptor {
+
+    String getName();
+
+    String getDiscoveryType();
+
+    String getDiscoveryAddress();
+
+    String getDiscoveryUser();
+
+    String getDiscoveryPasswordAlias();
+
+    String getClusterName();
+
+    String getProviderConfig();
+
+    List<Service> getServices();
+
+
+    interface Service {
+        String getName();
+
+        List<String> getURLs();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/c2ca4432/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
new file mode 100644
index 0000000..3df6d2f
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorFactory.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.simple;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import org.apache.commons.io.FilenameUtils;
+
+import java.io.File;
+import java.io.IOException;
+
+
+public class SimpleDescriptorFactory {
+
+    /**
+     * Create a SimpleDescriptor from the specified file.
+     *
+     * @param path The path to the file.
+     * @return A SimpleDescriptor based on the contents of the file.
+     *
+     * @throws IOException if the file cannot be read or its contents cannot be parsed.
+     */
+    public static SimpleDescriptor parse(String path) throws IOException {
+        SimpleDescriptor sd;
+
+        if (path.endsWith(".json")) {
+            sd = parseJSON(path);
+        } else if (path.endsWith(".yml")) {
+            sd = parseYAML(path);
+        } else {
+            throw new IllegalArgumentException("Unsupported simple descriptor format: " + path.substring(path.lastIndexOf('.')));
+        }
+
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseJSON(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper();
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+
+    static SimpleDescriptor parseYAML(String path) throws IOException {
+        final ObjectMapper mapper = new ObjectMapper(new YAMLFactory());
+        SimpleDescriptorImpl sd = mapper.readValue(new File(path), SimpleDescriptorImpl.class);
+        if (sd != null) {
+            sd.setName(FilenameUtils.getBaseName(path));
+        }
+        return sd;
+    }
+
+}
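
A short usage sketch; the descriptor path is hypothetical, and parse() dispatches on the file extension exactly as shown above.

    import java.io.IOException;
    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptor;
    import org.apache.hadoop.gateway.topology.simple.SimpleDescriptorFactory;

    public class DescriptorParseSketch {
      public static void main(String[] args) throws IOException {
        // The path is hypothetical; parse() dispatches on the ".json"/".yml" extension.
        SimpleDescriptor sd = SimpleDescriptorFactory.parse("/tmp/descriptors/sandbox.json");
        System.out.println(sd.getName() + " -> " + sd.getProviderConfig());
        for (SimpleDescriptor.Service service : sd.getServices()) {
          System.out.println("  " + service.getName() + " " + service.getURLs());
        }
      }
    }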


[36/37] knox git commit: KNOX-1063 - Simple Descriptor discovery should notice YAML files with the .yaml extension (Phil Zampino via lmccay)

Posted by lm...@apache.org.
KNOX-1063 - Simple Descriptor discovery should notice YAML files with the .yaml extension (Phil Zampino via lmccay)

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/986615ff
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/986615ff
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/986615ff

Branch: refs/heads/KNOX-1049
Commit: 986615ff6756ee9b6940dbba0e536c5bcba5de25
Parents: c440344
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 19 18:28:18 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 19 18:28:18 2017 -0400

----------------------------------------------------------------------
 .../gateway/services/topology/impl/DefaultTopologyService.java      | 1 +
 1 file changed, 1 insertion(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/986615ff/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index a8d6de8..13e1a3d 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -523,6 +523,7 @@ public class DefaultTopologyService
     static {
       SUPPORTED_EXTENSIONS.add("json");
       SUPPORTED_EXTENSIONS.add("yml");
+      SUPPORTED_EXTENSIONS.add("yaml");
     }
 
     private File topologiesDir;
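
To illustrate why the one-line addition matters: DescriptorsMonitor.accept() compares the bare file extension against SUPPORTED_EXTENSIONS, so a *.yaml descriptor was previously ignored. The file names below are illustrative; note that the match is case-sensitive.

    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.io.FilenameUtils;

    public class ExtensionCheckSketch {
      public static void main(String[] args) {
        List<String> supported = Arrays.asList("json", "yml", "yaml");
        // Before this commit, "cluster.yaml" fell through the descriptor filter.
        System.out.println(supported.contains(FilenameUtils.getExtension("cluster.yaml"))); // true
        // getExtension() preserves case, so the comparison remains case-sensitive.
        System.out.println(supported.contains(FilenameUtils.getExtension("cluster.YAML"))); // false
      }
    }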


[31/37] knox git commit: KNOX-1082 - Add support to validate the "nbf" claim for JWTs

Posted by lm...@apache.org.
KNOX-1082 - Add support to validate the "nbf" claim for JWTs


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/bb467b8c
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/bb467b8c
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/bb467b8c

Branch: refs/heads/KNOX-1049
Commit: bb467b8c4ecd87fc83ec1cf2863767b0330f171e
Parents: 9c7aa7e
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Tue Oct 17 12:49:04 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Tue Oct 17 12:49:04 2017 +0100

----------------------------------------------------------------------
 .../provider/federation/jwt/JWTMessages.java    |  3 ++
 .../jwt/filter/AbstractJWTFilter.java           |  9 ++++-
 .../federation/AbstractJWTFilterTest.java       | 40 ++++++++++++++++++--
 .../services/security/token/impl/JWT.java       |  3 ++
 .../services/security/token/impl/JWTToken.java  | 11 ++++++
 5 files changed, 61 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/bb467b8c/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/JWTMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/JWTMessages.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/JWTMessages.java
index f6969c6..f38d13b 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/JWTMessages.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/JWTMessages.java
@@ -34,6 +34,9 @@ public interface JWTMessages {
   @Message( level = MessageLevel.INFO, text = "Access token has expired; a new one must be acquired." )
   void tokenHasExpired();
 
+  @Message( level = MessageLevel.INFO, text = "The NotBefore check failed." )
+  void notBeforeCheckFailed();
+
   @Message( level = MessageLevel.WARN, text = "Expected Bearer token is missing." )
   void missingBearerToken();
 

http://git-wip-us.apache.org/repos/asf/knox/blob/bb467b8c/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index deb3d5b..0d8ecb8 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@ -275,7 +275,14 @@ public abstract class AbstractJWTFilter implements Filter {
         if (tokenIsStillValid(token)) {
           boolean audValid = validateAudiences(token);
           if (audValid) {
-            return true;
+              Date nbf = token.getNotBeforeDate();
+              if (nbf == null || new Date().after(nbf)) {
+                return true;
+              } else {
+                log.notBeforeCheckFailed();
+                handleValidationError(request, response, HttpServletResponse.SC_BAD_REQUEST,
+                                      "Bad request: the NotBefore check failed");
+              }
           }
           else {
             log.failedToValidateAudience();

http://git-wip-us.apache.org/repos/asf/knox/blob/bb467b8c/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index b261081..54c596b 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -505,7 +505,7 @@ public abstract class AbstractJWTFilterTest  {
       handler.init(new TestFilterConfig(props));
 
       SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", new Date(new Date().getTime() + 5000),
-                             privateKey, JWSAlgorithm.RS512.getName());
+                             new Date(), privateKey, JWSAlgorithm.RS512.getName());
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -536,7 +536,7 @@ public abstract class AbstractJWTFilterTest  {
       handler.init(new TestFilterConfig(props));
 
       SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice", new Date(new Date().getTime() + 5000),
-                             privateKey, JWSAlgorithm.RS384.getName());
+                             new Date(), privateKey, JWSAlgorithm.RS384.getName());
 
       HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
       setTokenOnRequest(request, jwt);
@@ -558,6 +558,37 @@ public abstract class AbstractJWTFilterTest  {
     }
   }
 
+  @Test
+  public void testNotBeforeJWT() throws Exception {
+    try {
+      Properties props = getProperties();
+      handler.init(new TestFilterConfig(props));
+
+      SignedJWT jwt = getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, "alice",
+                             new Date(new Date().getTime() + 5000),
+                             new Date(new Date().getTime() + 5000), privateKey,
+                             JWSAlgorithm.RS256.getName());
+
+      HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+      setTokenOnRequest(request, jwt);
+
+      EasyMock.expect(request.getRequestURL()).andReturn(
+          new StringBuffer(SERVICE_URL)).anyTimes();
+      EasyMock.expect(request.getQueryString()).andReturn(null);
+      HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+      EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+          SERVICE_URL);
+      EasyMock.replay(request);
+
+      TestFilterChain chain = new TestFilterChain();
+      handler.doFilter(request, response, chain);
+      Assert.assertTrue("doFilterCalled should not be false.", !chain.doFilterCalled);
+      Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+    } catch (ServletException se) {
+      fail("Should NOT have thrown a ServletException.");
+    }
+  }
+
   protected Properties getProperties() {
     Properties props = new Properties();
     props.setProperty(
@@ -568,10 +599,10 @@ public abstract class AbstractJWTFilterTest  {
 
   protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey)
       throws Exception {
-    return getJWT(issuer, sub, expires, privateKey, JWSAlgorithm.RS256.getName());
+    return getJWT(issuer, sub, expires, new Date(), privateKey, JWSAlgorithm.RS256.getName());
   }
 
-  protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey,
+  protected SignedJWT getJWT(String issuer, String sub, Date expires, Date nbf, RSAPrivateKey privateKey,
                              String signatureAlgorithm)
       throws Exception {
     List<String> aud = new ArrayList<String>();
@@ -582,6 +613,7 @@ public abstract class AbstractJWTFilterTest  {
     .subject(sub)
     .audience(aud)
     .expirationTime(expires)
+    .notBeforeTime(nbf)
     .claim("scope", "openid")
     .build();
 

http://git-wip-us.apache.org/repos/asf/knox/blob/bb467b8c/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
index 1a6f4f9..fa9076e 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWT.java
@@ -29,6 +29,7 @@ public interface JWT {
   String ISSUER = "iss";
   String AUDIENCE = "aud";
   String EXPIRES = "exp";
+  String NOT_BEFORE = "nbf";
 
   String getPayload();
 
@@ -50,6 +51,8 @@ public interface JWT {
 
   Date getExpiresDate();
 
+  Date getNotBeforeDate();
+
   String getSubject();
 
   String getHeader();

http://git-wip-us.apache.org/repos/asf/knox/blob/bb467b8c/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
index be2a331..f985caf 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/security/token/impl/JWTToken.java
@@ -231,6 +231,17 @@ public class JWTToken implements JWT {
     return date;
   }
 
+  @Override
+  public Date getNotBeforeDate() {
+    Date date = null;
+    try {
+      date = jwt.getJWTClaimsSet().getNotBeforeTime();
+    } catch (ParseException e) {
+      log.unableToParseToken(e);
+    }
+    return date;
+  }
+
   /* (non-Javadoc)
    * @see org.apache.hadoop.gateway.services.security.token.impl.JWT#getPrincipal()
    */


[14/37] knox git commit: KNOX-1014 - remove extraneous directory

Posted by lm...@apache.org.
KNOX-1014 - remove extraneous directory

Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a841e265
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a841e265
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a841e265

Branch: refs/heads/KNOX-1049
Commit: a841e2656b359e4f9bd6e819eba2b9723faec1bb
Parents: 78ef4e5
Author: Larry McCay <lm...@hortonworks.com>
Authored: Mon Sep 25 13:37:54 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Mon Sep 25 13:55:30 2017 -0400

----------------------------------------------------------------------
 b/gateway-discovery-ambari/pom.xml              |  66 --
 .../discovery/ambari/AmbariCluster.java         | 114 ---
 .../discovery/ambari/AmbariComponent.java       |  76 --
 .../ambari/AmbariServiceDiscovery.java          | 291 -------
 .../ambari/AmbariServiceDiscoveryMessages.java  |  81 --
 .../ambari/AmbariServiceDiscoveryType.java      |  35 -
 .../ambari/AmbariServiceURLCreator.java         | 184 ----
 ...eway.topology.discovery.ServiceDiscoveryType |  19 -
 .../ambari/AmbariServiceDiscoveryTest.java      | 856 -------------------
 b/gateway-release/home/conf/descriptors/README  |   1 -
 .../home/conf/shared-providers/README           |   1 -
 11 files changed, 1724 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/pom.xml
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/pom.xml b/b/gateway-discovery-ambari/pom.xml
deleted file mode 100644
index 924e89c..0000000
--- a/b/gateway-discovery-ambari/pom.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.knox</groupId>
-        <artifactId>gateway</artifactId>
-        <version>0.14.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>gateway-discovery-ambari</artifactId>
-
-    <name>gateway-discovery-ambari</name>
-    <description>The extension to the gateway for service discovery using Apache Ambari.</description>
-
-    <licenses>
-        <license>
-            <name>The Apache Software License, Version 2.0</name>
-            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-            <distribution>repo</distribution>
-        </license>
-    </licenses>
-
-    <dependencies>
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-spi</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>${gateway-group}</groupId>
-            <artifactId>gateway-test-utils</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.easymock</groupId>
-            <artifactId>easymock</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-    </dependencies>
-
-</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
deleted file mode 100644
index 6eaabd3..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-class AmbariCluster implements ServiceDiscovery.Cluster {
-
-    private String name = null;
-
-    private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
-
-    private Map<String, AmbariComponent> components = null;
-
-
-    AmbariCluster(String name) {
-        this.name = name;
-        components = new HashMap<String, AmbariComponent>();
-    }
-
-    void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
-        if (!serviceConfigurations.keySet().contains(serviceName)) {
-            serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
-        }
-        serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
-    }
-
-
-    void addComponent(AmbariComponent component) {
-        components.put(component.getName(), component);
-    }
-
-
-    ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
-        ServiceConfiguration sc = null;
-        Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-        if (configs != null) {
-            sc = configs.get(configurationType);
-        }
-        return sc;
-    }
-
-
-    Map<String, AmbariComponent> getComponents() {
-        return components;
-    }
-
-
-    AmbariComponent getComponent(String name) {
-        return components.get(name);
-    }
-
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-
-    @Override
-    public List<String> getServiceURLs(String serviceName) {
-        List<String> urls = new ArrayList<>();
-        urls.addAll(urlCreator.create(this, serviceName));
-        return urls;
-    }
-
-
-    static class ServiceConfiguration {
-
-        private String type;
-        private String version;
-        private Map<String, String> props;
-
-        ServiceConfiguration(String type, String version, Map<String, String> properties) {
-            this.type = type;
-            this.version = version;
-            this.props = properties;
-        }
-
-        public String getVersion() {
-            return version;
-        }
-
-        public String getType() {
-            return type;
-        }
-
-        public Map<String, String> getProperties() {
-            return props;
-        }
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
deleted file mode 100644
index 55257fb..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import java.util.List;
-import java.util.Map;
-
-class AmbariComponent {
-
-    private String clusterName = null;
-    private String serviceName = null;
-    private String name        = null;
-    private String version     = null;
-
-    private List<String> hostNames = null;
-
-    private Map<String, String> properties = null;
-
-    AmbariComponent(String              name,
-                    String              version,
-                    String              cluster,
-                    String              service,
-                    List<String>        hostNames,
-                    Map<String, String> properties) {
-        this.name = name;
-        this.serviceName = service;
-        this.clusterName = cluster;
-        this.version = version;
-        this.hostNames = hostNames;
-        this.properties = properties;
-    }
-
-    public String getVersion() {
-        return version;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String getServiceName() {
-        return serviceName;
-    }
-
-    public String getClusterName() {
-        return clusterName;
-    }
-
-    public List<String> getHostNames() {
-        return hostNames;
-    }
-
-    public Map<String, String> getConfigProperties() {
-        return properties;
-    }
-
-    public String getConfigProperty(String propertyName) {
-        return properties.get(propertyName);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
deleted file mode 100644
index 34f20a7..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ /dev/null
@@ -1,291 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import net.minidev.json.JSONArray;
-import net.minidev.json.JSONObject;
-import net.minidev.json.JSONValue;
-import org.apache.hadoop.gateway.config.ConfigurationException;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.hadoop.gateway.topology.discovery.GatewayService;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpStatus;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.util.EntityUtils;
-
-import java.io.IOException;
-import java.util.*;
-
-
-class AmbariServiceDiscovery implements ServiceDiscovery {
-
-    static final String TYPE = "AMBARI";
-
-    static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
-
-    static final String AMBARI_HOSTROLES_URI =
-                                       AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
-
-    static final String AMBARI_SERVICECONFIGS_URI =
-            AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
-
-    // Map of component names to service configuration types
-    private static Map<String, String> componentServiceConfigs = new HashMap<>();
-    static {
-        componentServiceConfigs.put("NAMENODE", "hdfs-site");
-        componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
-        componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
-        componentServiceConfigs.put("HIVE_SERVER", "hive-site");
-        componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
-        componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
-    } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
-
-    private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
-    private static final String DEFAULT_PWD_ALIAS  = "ambari.discovery.password";
-
-    private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
-    private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
-    @GatewayService
-    private AliasService aliasService;
-
-    private CloseableHttpClient httpClient = null;
-
-    private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
-
-    AmbariServiceDiscovery() {
-        httpClient = org.apache.http.impl.client.HttpClients.createDefault();
-    }
-
-
-    @Override
-    public String getType() {
-        return TYPE;
-    }
-
-
-    @Override
-    public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
-        Map<String, Cluster> clusters = new HashMap<String, Cluster>();
-
-        String discoveryAddress = config.getAddress();
-
-        // Invoke Ambari REST API to discover the available clusters
-        String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
-
-        JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
-
-        // Parse the cluster names from the response, and perform the cluster discovery
-        JSONArray clusterItems = (JSONArray) json.get("items");
-        for (Object clusterItem : clusterItems) {
-            String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
-            try {
-                Cluster c = discover(config, clusterName);
-                clusters.put(clusterName, c);
-            } catch (Exception e) {
-                log.clusterDiscoveryError(clusterName, e);
-            }
-        }
-
-        return clusters;
-    }
-
-
-    @Override
-    public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
-        AmbariCluster cluster = new AmbariCluster(clusterName);
-
-        Map<String, String> serviceComponents = new HashMap<>();
-
-        String discoveryAddress = config.getAddress();
-        String discoveryUser = config.getUser();
-        String discoveryPwdAlias = config.getPasswordAlias();
-
-        Map<String, List<String>> componentHostNames = new HashMap<>();
-        String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
-        JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
-        if (hostRolesJSON != null) {
-            // Process the host roles JSON
-            JSONArray items = (JSONArray) hostRolesJSON.get("items");
-            for (Object obj : items) {
-                JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
-                for (Object component : components) {
-                    JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
-                    for (Object hostComponent : hostComponents) {
-                        JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
-                        String serviceName = (String) hostRoles.get("service_name");
-                        String componentName = (String) hostRoles.get("component_name");
-
-                        serviceComponents.put(componentName, serviceName);
-
-//                    String hostName = (String) hostRoles.get("host_name");
-                        String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
-                        log.discoveredServiceHost(serviceName, hostName);
-                        if (!componentHostNames.containsKey(componentName)) {
-                            componentHostNames.put(componentName, new ArrayList<String>());
-                        }
-                        componentHostNames.get(componentName).add(hostName);
-                    }
-                }
-            }
-        }
-
-        Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
-                                                 new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
-        String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
-        JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
-        if (serviceConfigsJSON != null) {
-            // Process the service configurations
-            JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
-            for (Object serviceConfig : serviceConfigs) {
-                String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
-                JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
-                for (Object configuration : configurations) {
-                    String configType = (String) ((JSONObject) configuration).get("type");
-                    String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
-
-                    Map<String, String> configProps = new HashMap<String, String>();
-                    JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
-                    for (String propertyName : configProperties.keySet()) {
-                        configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
-                    }
-                    if (!serviceConfigurations.containsKey(serviceName)) {
-                        serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
-                    }
-                    serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                    cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
-                }
-            }
-        }
-
-        // Construct the AmbariCluster model
-        for (String componentName : serviceComponents.keySet()) {
-            String serviceName = serviceComponents.get(componentName);
-            List<String> hostNames = componentHostNames.get(componentName);
-
-            Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
-            String configType = componentServiceConfigs.get(componentName);
-            if (configType != null) {
-                AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
-                AmbariComponent c = new AmbariComponent(componentName,
-                                                        svcConfig.getVersion(),
-                                                        clusterName,
-                                                        serviceName,
-                                                        hostNames,
-                                                        svcConfig.getProperties());
-                cluster.addComponent(c);
-            }
-        }
-
-        return cluster;
-    }
-
-
-    protected JSONObject invokeREST(String url, String username, String passwordAlias) {
-        JSONObject result = null;
-
-        CloseableHttpResponse response = null;
-        try {
-            HttpGet request = new HttpGet(url);
-
-            // If no configured username, then use default username alias
-            String password = null;
-            if (username == null) {
-                if (aliasService != null) {
-                    try {
-                        char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
-                        if (defaultUser != null) {
-                            username = new String(defaultUser);
-                        }
-                    } catch (AliasServiceException e) {
-                        log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
-                    }
-                }
-
-                // If username is still null
-                if (username == null) {
-                    log.aliasServiceUserNotFound();
-                    throw new ConfigurationException("No username is configured for Ambari service discovery.");
-                }
-            }
-
-            if (aliasService != null) {
-                // If no password alias is configured, then try the default alias
-                if (passwordAlias == null) {
-                    passwordAlias = DEFAULT_PWD_ALIAS;
-                }
-                try {
-                    char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
-                    if (pwd != null) {
-                        password = new String(pwd);
-                    }
-
-                } catch (AliasServiceException e) {
-                    log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
-                }
-            }
-
-            // If the password could not be determined
-            if (password == null) {
-                log.aliasServicePasswordNotFound();
-                throw new ConfigurationException("No password is configured for Ambari service discovery.");
-            }
-
-            // Add an auth header if credentials are available
-            String encodedCreds =
-                    org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
-            request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
-
-            response = httpClient.execute(request);
-
-            if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
-                HttpEntity entity = response.getEntity();
-                if (entity != null) {
-                    result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
-                    log.debugJSON(result.toJSONString());
-                } else {
-                    log.noJSON(url);
-                }
-            } else {
-                log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
-            }
-
-        } catch (IOException e) {
-            log.restInvocationError(url, e);
-        } finally {
-            if(response != null) {
-                try {
-                    response.close();
-                } catch (IOException e) {
-                    // Ignore
-                }
-            }
-        }
-        return result;
-    }
-
-
-}

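For context, the invokeREST method removed above reduces to a Basic-auth GET with Apache HttpClient 4.x. A minimal standalone sketch of that pattern follows; the class and method names are illustrative, and plain-string credentials stand in for the AliasService lookups the real code performs:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.binary.Base64;
import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;

public class BasicAuthGetSketch {

    /** Returns the response body for a 200 response, or null otherwise. */
    public static String get(String url, String username, String password) throws IOException {
        // Base64-encode the credentials for the Authorization header, as the deleted code does.
        String encodedCreds = Base64.encodeBase64String(
                (username + ":" + password).getBytes(StandardCharsets.UTF_8));

        HttpGet request = new HttpGet(url);
        request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));

        // try-with-resources replaces the explicit finally/close bookkeeping above.
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(request)) {
            if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                HttpEntity entity = response.getEntity();
                return (entity != null) ? EntityUtils.toString(entity) : null;
            }
            return null;
        }
    }
}

The returned body can then be handed to a JSON parser (the deleted code uses JSONValue.parse) before walking the items/configurations structure shown in the discovery loop above.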
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
deleted file mode 100644
index caa16ed..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.gateway.topology.discovery.ambari")
-public interface AmbariServiceDiscoveryMessages {
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error during cluster {0} discovery: {1}")
-    void clusterDiscoveryError(final String clusterName,
-                               @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation {0} failed: {1}")
-    void restInvocationError(final String url,
-                             @StackTrace(level = MessageLevel.ERROR) Exception e);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the user for alias {0} : {1}")
-    void aliasServiceUserError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Encountered an error attempting to determine the password for alias {0} : {1}")
-    void aliasServicePasswordError(final String alias, final String error);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No user configured for Ambari service discovery.")
-    void aliasServiceUserNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "No password configured for Ambari service discovery.")
-    void aliasServicePasswordNotFound();
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "Unexpected REST invocation response code for {0} : {1}")
-    void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
-
-
-    @Message(level = MessageLevel.ERROR,
-            text = "REST invocation {0} yielded a response without any JSON.")
-    void noJSON(final String url);
-
-
-    @Message(level = MessageLevel.DEBUG,
-            text = "REST invocation result: {0}")
-    void debugJSON(final String json);
-
-
-    @Message(level = MessageLevel.INFO,
-            text = "Discovered: Service: {0}, Host: {1}")
-    void discoveredServiceHost(final String serviceName, final String hostName);
-
-
-
-
-}

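The @Messages/@Message annotations above are Knox's i18n logging facility; implementations of the interface are generated at runtime rather than written by hand. A sketch of how such an interface is typically obtained and used, assuming Knox's MessagesFactory (the caller class here is hypothetical):

import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;

public class DiscoveryCallerSketch {

    private static final AmbariServiceDiscoveryMessages log =
            MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

    void onDiscoveryFailure(String clusterName, Exception e) {
        // Renders the ERROR-level template "Encountered an error during
        // cluster {0} discovery: {1}" with the stack trace attached per @StackTrace.
        log.clusterDiscoveryError(clusterName, e);
    }
}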
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
deleted file mode 100644
index 723a786..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryType.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;
-
-public class AmbariServiceDiscoveryType implements ServiceDiscoveryType {
-
-    private static final String IMPL = AmbariServiceDiscovery.class.getCanonicalName();
-
-    @Override
-    public String getType() {
-        return AmbariServiceDiscovery.TYPE;
-    }
-
-    @Override
-    public ServiceDiscovery newInstance() {
-        return new AmbariServiceDiscovery();
-    }
-}

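AmbariServiceDiscoveryType is a standard ServiceLoader SPI implementation: getType() names the discovery source and newInstance() supplies a fresh ServiceDiscovery. A sketch of how a consumer might resolve a type string to an instance; the lookup method shown is illustrative, and Knox's actual factory may be organized differently:

import java.util.ServiceLoader;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

public class ServiceDiscoveryLookupSketch {

    /** Returns a new ServiceDiscovery for the named type, or null if none is registered. */
    public static ServiceDiscovery forType(String type) {
        // Iterates over all implementations registered via META-INF/services.
        for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
            if (sdt.getType().equals(type)) {
                return sdt.newInstance();
            }
        }
        return null;
    }
}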
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index 0674642..0000000
--- a/b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-class AmbariServiceURLCreator {
-
-    private static final String NAMENODE_SERVICE        = "NAMENODE";
-    private static final String JOBTRACKER_SERVICE      = "JOBTRACKER";
-    private static final String WEBHDFS_SERVICE         = "WEBHDFS";
-    private static final String WEBHCAT_SERVICE         = "WEBHCAT";
-    private static final String OOZIE_SERVICE           = "OOZIE";
-    private static final String WEBHBASE_SERVICE        = "WEBHBASE";
-    private static final String HIVE_SERVICE            = "HIVE";
-    private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
-
-
-    /**
-     * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
-     *
-     * @param cluster The cluster discovery results
-     * @param serviceName The name of a Hadoop service
-     *
-     * @return One or more endpoint URLs for the specified service.
-     */
-    public List<String> create(AmbariCluster cluster, String serviceName) {
-        List<String> result = null;
-
-        if (NAMENODE_SERVICE.equals(serviceName)) {
-            result = createNameNodeURL(cluster);
-        } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
-            result = createJobTrackerURL(cluster);
-        } else if (WEBHDFS_SERVICE.equals(serviceName)) {
-            result = createWebHDFSURL(cluster);
-        } else if (WEBHCAT_SERVICE.equals(serviceName)) {
-            result = createWebHCatURL(cluster);
-        } else if (OOZIE_SERVICE.equals(serviceName)) {
-            result = createOozieURL(cluster);
-        } else if (WEBHBASE_SERVICE.equals(serviceName)) {
-            result = createWebHBaseURL(cluster);
-        } else if (HIVE_SERVICE.equals(serviceName)) {
-            result = createHiveURL(cluster);
-        } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
-            result = createResourceManagerURL(cluster);
-        }
-
-        return result;
-    }
-
-
-    private List<String> createNameNodeURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("NAMENODE");
-        if (comp != null) {
-            result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createJobTrackerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
-        if (comp != null) {
-            result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHDFSURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
-        if (sc != null) {
-            String address = sc.getProperties().get("dfs.namenode.http-address");
-            result.add("http://" + address + "/webhdfs");
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHCatURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
-        if (webhcat != null) {
-            String port = webhcat.getConfigProperty("templeton.port");
-            String host = webhcat.getHostNames().get(0);
-
-            result.add("http://" + host + ":" + port + "/templeton");
-        }
-        return result;
-    }
-
-
-    private List<String> createOozieURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
-        if (comp != null) {
-            result.add(comp.getConfigProperty("oozie.base.url"));
-        }
-
-        return result;
-    }
-
-
-    private List<String> createWebHBaseURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
-        if (comp != null) {
-            for (String host : comp.getHostNames()) {
-                result.add("http://" + host + ":60080");
-            }
-        }
-
-        return result;
-    }
-
-
-    private List<String> createHiveURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
-        if (hive != null) {
-            String path = hive.getConfigProperty("hive.server2.thrift.http.path");
-            String port = hive.getConfigProperty("hive.server2.thrift.http.port");
-            String transport = hive.getConfigProperty("hive.server2.transport.mode");
-            String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
-            String host = hive.getHostNames().get(0);
-
-            String scheme = null; // What is the scheme for the binary transport mode?
-            if ("http".equals(transport)) {
-                scheme = Boolean.valueOf(useSSL) ? "https" : "http";
-            }
-
-            result.add(scheme + "://" + host + ":" + port + "/" + path);
-        }
-        return result;
-    }
-
-
-    private List<String> createResourceManagerURL(AmbariCluster cluster) {
-        List<String> result = new ArrayList<>();
-
-        AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
-        if (resMan != null) {
-            String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
-            String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
-            String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
-
-            result.add(scheme + "://" + webappAddress + "/ws");
-        }
-
-        return result;
-    }
-
-
-}

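A usage sketch for the (package-private) creator above, from a hypothetical caller in the same package; the host and port in the comment are illustrative values, not taken from this commit:

import java.util.List;

class UrlCreatorUsageSketch {

    static List<String> webHdfsUrls(AmbariCluster cluster) {
        // With dfs.namenode.http-address = c6401.example.com:50070 in the
        // cluster's hdfs-site configuration, this would return a single entry:
        // http://c6401.example.com:50070/webhdfs
        return new AmbariServiceURLCreator().create(cluster, "WEBHDFS");
    }
}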
http://git-wip-us.apache.org/repos/asf/knox/blob/a841e265/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
----------------------------------------------------------------------
diff --git a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType b/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
deleted file mode 100644
index 1da4fc9..0000000
--- a/b/gateway-discovery-ambari/src/main/resources/META-INF/services/org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.topology.discovery.ambari.AmbariServiceDiscoveryType
\ No newline at end of file
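An entry like the one above is what lets java.util.ServiceLoader locate the Ambari implementation at runtime. A quick sketch for checking which registrations are visible on the classpath (the class name is illustrative):

import java.util.ServiceLoader;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryType;

public class RegistrationCheckSketch {
    public static void main(String[] args) {
        // Lists every ServiceDiscoveryType registered on the classpath; with a
        // registration like the one above present, the Ambari type string
        // (AmbariServiceDiscovery.TYPE) should be printed.
        for (ServiceDiscoveryType sdt : ServiceLoader.load(ServiceDiscoveryType.class)) {
            System.out.println(sdt.getType());
        }
    }
}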