You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by mo...@apache.org on 2017/10/25 19:20:50 UTC

[14/17] knox git commit: Merge branch 'master' into KNOX-998-Package_Restructuring

http://git-wip-us.apache.org/repos/asf/knox/blob/58780d37/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
index eba98a4,48616c0..da55422
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@@ -21,13 -21,14 +21,15 @@@ import java.io.File
  import java.security.Principal;
  import java.util.HashMap;
  
 -import org.apache.hadoop.gateway.config.GatewayConfig;
 -import org.apache.hadoop.gateway.services.security.AliasService;
 -import org.apache.hadoop.gateway.services.security.KeystoreService;
 -import org.apache.hadoop.gateway.services.security.MasterService;
 -import org.apache.hadoop.gateway.services.security.impl.DefaultKeystoreService;
 -import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
 -import org.apache.hadoop.gateway.services.security.token.TokenServiceException;
 -import org.apache.hadoop.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.knox.gateway.services.security.KeystoreService;
 +import org.apache.knox.gateway.services.security.MasterService;
 +import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
++import org.apache.knox.gateway.services.security.token.TokenServiceException;
++
  import org.easymock.EasyMock;
  import org.junit.Test;
  

http://git-wip-us.apache.org/repos/asf/knox/blob/58780d37/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
index efee1d8,0000000..41a7c10
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorFactoryTest.java
@@@ -1,218 -1,0 +1,422 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one or more
 + * contributor license agreements. See the NOTICE file distributed with this
 + * work for additional information regarding copyright ownership. The ASF
 + * licenses this file to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance with the License.
 + * You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 + * License for the specific language governing permissions and limitations under
 + * the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
 +import java.io.File;
 +import java.io.FileWriter;
 +import java.io.Writer;
 +import java.util.*;
 +
 +import org.junit.Test;
 +import static org.junit.Assert.*;
 +
 +
 +public class SimpleDescriptorFactoryTest {
 +
 +
 +    @Test
 +    public void testParseJSONSimpleDescriptor() throws Exception {
 +
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "admin";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> services = new HashMap<>();
 +        services.put("NODEMANAGER", null);
 +        services.put("JOBTRACKER", null);
 +        services.put("RESOURCEMANAGER", null);
 +        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
 +        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
 +
 +        String fileName = "test-topology.json";
 +        File testJSON = null;
 +        try {
 +            testJSON = writeJSON(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
 +            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +        } finally {
 +            if (testJSON != null) {
 +                try {
 +                    testJSON.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +    @Test
++    public void testParseJSONSimpleDescriptorWithServiceParams() throws Exception {
++
++        final String   discoveryType    = "AMBARI";
++        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
++        final String   discoveryUser    = "admin";
++        final String   providerConfig   = "ambari-cluster-policy.xml";
++        final String   clusterName      = "myCluster";
++
++        final Map<String, List<String>> services = new HashMap<>();
++        services.put("NODEMANAGER", null);
++        services.put("JOBTRACKER", null);
++        services.put("RESOURCEMANAGER", null);
++        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
++        services.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
++        services.put("KNOXSSO", null);
++        services.put("KNOXTOKEN", null);
++        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
++
++        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
++        Map<String, String> knoxSSOParams = new HashMap<>();
++        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
++        knoxSSOParams.put("knoxsso.token.ttl", "100000");
++        serviceParams.put("KNOXSSO", knoxSSOParams);
++
++        Map<String, String> knoxTokenParams = new HashMap<>();
++        knoxTokenParams.put("knox.token.ttl", "36000000");
++        knoxTokenParams.put("knox.token.audiences", "tokenbased");
++        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
++        serviceParams.put("KNOXTOKEN", knoxTokenParams);
++
++        Map<String, String> customRoleParams = new HashMap<>();
++        customRoleParams.put("custom.param.1", "value1");
++        customRoleParams.put("custom.param.2", "value2");
++        serviceParams.put("CustomRole", customRoleParams);
++
++        String fileName = "test-topology.json";
++        File testJSON = null;
++        try {
++            testJSON = writeJSON(fileName,
++                                 discoveryType,
++                                 discoveryAddress,
++                                 discoveryUser,
++                                 providerConfig,
++                                 clusterName,
++                                 services,
++                                 serviceParams);
++            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testJSON.getAbsolutePath());
++            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services, serviceParams);
++        } catch (Exception e) {
++            e.printStackTrace();
++        } finally {
++            if (testJSON != null) {
++                try {
++                    testJSON.delete();
++                } catch (Exception e) {
++                    // Ignore
++                }
++            }
++        }
++    }
++
++    @Test
 +    public void testParseYAMLSimpleDescriptor() throws Exception {
 +
 +        final String   discoveryType    = "AMBARI";
 +        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
 +        final String   discoveryUser    = "joeblow";
 +        final String   providerConfig   = "ambari-cluster-policy.xml";
 +        final String   clusterName      = "myCluster";
 +
 +        final Map<String, List<String>> services = new HashMap<>();
 +        services.put("NODEMANAGER", null);
 +        services.put("JOBTRACKER", null);
 +        services.put("RESOURCEMANAGER", null);
 +        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
 +        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
 +
 +        String fileName = "test-topology.yml";
 +        File testYAML = null;
 +        try {
 +            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services);
 +            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
 +            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services);
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +        } finally {
 +            if (testYAML != null) {
 +                try {
 +                    testYAML.delete();
 +                } catch (Exception e) {
 +                    // Ignore
 +                }
 +            }
 +        }
 +    }
 +
 +
-     private void validateSimpleDescriptor(SimpleDescriptor    sd,
-                                           String              discoveryType,
-                                           String              discoveryAddress,
-                                           String              providerConfig,
-                                           String              clusterName,
++    @Test
++    public void testParseYAMLSimpleDescriptorWithServiceParams() throws Exception {
++
++        final String   discoveryType    = "AMBARI";
++        final String   discoveryAddress = "http://c6401.ambari.apache.org:8080";
++        final String   discoveryUser    = "joeblow";
++        final String   providerConfig   = "ambari-cluster-policy.xml";
++        final String   clusterName      = "myCluster";
++
++        final Map<String, List<String>> services = new HashMap<>();
++        services.put("NODEMANAGER", null);
++        services.put("JOBTRACKER", null);
++        services.put("RESOURCEMANAGER", null);
++        services.put("HIVE", Arrays.asList("http://c6401.ambari.apache.org", "http://c6402.ambari.apache.org", "http://c6403.ambari.apache.org"));
++        services.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
++        services.put("KNOXSSO", null);
++        services.put("KNOXTOKEN", null);
++        services.put("CustomRole", Collections.singletonList("http://c6402.ambari.apache.org:1234"));
++
++        final Map<String, Map<String, String>> serviceParams = new HashMap<>();
++        Map<String, String> knoxSSOParams = new HashMap<>();
++        knoxSSOParams.put("knoxsso.cookie.secure.only", "true");
++        knoxSSOParams.put("knoxsso.token.ttl", "100000");
++        serviceParams.put("KNOXSSO", knoxSSOParams);
++
++        Map<String, String> knoxTokenParams = new HashMap<>();
++        knoxTokenParams.put("knox.token.ttl", "36000000");
++        knoxTokenParams.put("knox.token.audiences", "tokenbased");
++        knoxTokenParams.put("knox.token.target.url", "https://localhost:8443/gateway/tokenbased");
++        serviceParams.put("KNOXTOKEN", knoxTokenParams);
++
++        Map<String, String> customRoleParams = new HashMap<>();
++        customRoleParams.put("custom.param.1", "value1");
++        customRoleParams.put("custom.param.2", "value2");
++        serviceParams.put("CustomRole", customRoleParams);
++
++        String fileName = "test-topology.yml";
++        File testYAML = null;
++        try {
++            testYAML = writeYAML(fileName, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, serviceParams);
++            SimpleDescriptor sd = SimpleDescriptorFactory.parse(testYAML.getAbsolutePath());
++            validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, services, serviceParams);
++        } catch (Exception e) {
++            e.printStackTrace();
++        } finally {
++            if (testYAML != null) {
++                try {
++                    testYAML.delete();
++                } catch (Exception e) {
++                    // Ignore
++                }
++            }
++        }
++    }
++
++
++    private void validateSimpleDescriptor(SimpleDescriptor          sd,
++                                          String                    discoveryType,
++                                          String                    discoveryAddress,
++                                          String                    providerConfig,
++                                          String                    clusterName,
 +                                          Map<String, List<String>> expectedServices) {
++        validateSimpleDescriptor(sd, discoveryType, discoveryAddress, providerConfig, clusterName, expectedServices, null);
++    }
++
++
++    private void validateSimpleDescriptor(SimpleDescriptor                 sd,
++                                          String                           discoveryType,
++                                          String                           discoveryAddress,
++                                          String                           providerConfig,
++                                          String                           clusterName,
++                                          Map<String, List<String>>        expectedServices,
++                                          Map<String, Map<String, String>> expectedServiceParameters) {
 +        assertNotNull(sd);
 +        assertEquals(discoveryType, sd.getDiscoveryType());
 +        assertEquals(discoveryAddress, sd.getDiscoveryAddress());
 +        assertEquals(providerConfig, sd.getProviderConfig());
 +        assertEquals(clusterName, sd.getClusterName());
 +
 +        List<SimpleDescriptor.Service> actualServices = sd.getServices();
 +
 +        assertEquals(expectedServices.size(), actualServices.size());
 +
 +        for (SimpleDescriptor.Service actualService : actualServices) {
 +            assertTrue(expectedServices.containsKey(actualService.getName()));
 +            assertEquals(expectedServices.get(actualService.getName()), actualService.getURLs());
++
++            // Validate service parameters
++            if (expectedServiceParameters != null) {
++                if (expectedServiceParameters.containsKey(actualService.getName())) {
++                    Map<String, String> expectedParams = expectedServiceParameters.get(actualService.getName());
++
++                    Map<String, String> actualServiceParams = actualService.getParams();
++                    assertNotNull(actualServiceParams);
++
++                    // Validate the size of the service parameter set
++                    assertEquals(expectedParams.size(), actualServiceParams.size());
++
++                    // Validate the parameter contents
++                    for (String paramName : actualServiceParams.keySet()) {
++                        assertTrue(expectedParams.containsKey(paramName));
++                        assertEquals(expectedParams.get(paramName), actualServiceParams.get(paramName));
++                    }
++                }
++            }
 +        }
 +    }
 +
 +
 +    private File writeJSON(String path, String content) throws Exception {
 +        File f = new File(path);
 +
 +        Writer fw = new FileWriter(f);
 +        fw.write(content);
 +        fw.flush();
 +        fw.close();
 +
 +        return f;
 +    }
 +
 +
 +    private File writeJSON(String path,
 +                           String discoveryType,
 +                           String discoveryAddress,
 +                           String discoveryUser,
 +                           String providerConfig,
 +                           String clusterName,
 +                           Map<String, List<String>> services) throws Exception {
++        return writeJSON(path, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, null);
++    }
++
++    private File writeJSON(String path,
++                           String discoveryType,
++                           String discoveryAddress,
++                           String discoveryUser,
++                           String providerConfig,
++                           String clusterName,
++                           Map<String, List<String>> services,
++                           Map<String, Map<String, String>> serviceParams) throws Exception {
 +        File f = new File(path);
 +
 +        Writer fw = new FileWriter(f);
 +        fw.write("{" + "\n");
 +        fw.write("\"discovery-type\":\"" + discoveryType + "\",\n");
 +        fw.write("\"discovery-address\":\"" + discoveryAddress + "\",\n");
 +        fw.write("\"discovery-user\":\"" + discoveryUser + "\",\n");
 +        fw.write("\"provider-config-ref\":\"" + providerConfig + "\",\n");
 +        fw.write("\"cluster\":\"" + clusterName + "\",\n");
 +        fw.write("\"services\":[\n");
 +
 +        int i = 0;
 +        for (String name : services.keySet()) {
 +            fw.write("{\"name\":\"" + name + "\"");
++
++            // Service params
++            if (serviceParams != null && !serviceParams.isEmpty()) {
++                Map<String, String> params = serviceParams.get(name);
++                if (params != null && !params.isEmpty()) {
++                    fw.write(",\n\"params\":{\n");
++                    Iterator<String> paramNames = params.keySet().iterator();
++                    while (paramNames.hasNext()) {
++                        String paramName = paramNames.next();
++                        String paramValue = params.get(paramName);
++                        fw.write("\"" + paramName + "\":\"" + paramValue + "\"");
++                        fw.write(paramNames.hasNext() ? ",\n" : "");
++                    }
++                    fw.write("\n}");
++                }
++            }
++
++            // Service URLs
 +            List<String> urls = services.get(name);
 +            if (urls != null) {
-                 fw.write(", \"urls\":[");
++                fw.write(",\n\"urls\":[");
 +                Iterator<String> urlIter = urls.iterator();
 +                while (urlIter.hasNext()) {
 +                    fw.write("\"" + urlIter.next() + "\"");
 +                    if (urlIter.hasNext()) {
 +                        fw.write(", ");
 +                    }
 +                }
-                 fw.write("]");
++                fw.write("]\n");
 +            }
++
 +            fw.write("}");
 +            if (i++ < services.size() - 1) {
 +                fw.write(",");
 +            }
 +            fw.write("\n");
 +        }
 +        fw.write("]\n");
 +        fw.write("}\n");
 +        fw.flush();
 +        fw.close();
 +
 +        return f;
 +    }
 +
-     private File writeYAML(String path,
-                            String discoveryType,
-                            String discoveryAddress,
-                            String discoveryUser,
-                            String providerConfig,
-                            String clusterName,
++
++    private File writeYAML(String                    path,
++                           String                    discoveryType,
++                           String                    discoveryAddress,
++                           String                    discoveryUser,
++                           String                    providerConfig,
++                           String                    clusterName,
 +                           Map<String, List<String>> services) throws Exception {
++        return writeYAML(path, discoveryType, discoveryAddress, discoveryUser, providerConfig, clusterName, services, null);
++    }
++
++
++    private File writeYAML(String                           path,
++                           String                           discoveryType,
++                           String                           discoveryAddress,
++                           String                           discoveryUser,
++                           String                           providerConfig,
++                           String                           clusterName,
++                           Map<String, List<String>>        services,
++                           Map<String, Map<String, String>> serviceParams) throws Exception {
 +        File f = new File(path);
 +
 +        Writer fw = new FileWriter(f);
 +        fw.write("---" + "\n");
 +        fw.write("discovery-type: " + discoveryType + "\n");
 +        fw.write("discovery-address: " + discoveryAddress + "\n");
 +        fw.write("discovery-user: " + discoveryUser + "\n");
 +        fw.write("provider-config-ref: " + providerConfig + "\n");
 +        fw.write("cluster: " + clusterName+ "\n");
 +        fw.write("services:\n");
 +        for (String name : services.keySet()) {
 +            fw.write("    - name: " + name + "\n");
++
++            // Service params
++            if (serviceParams != null && !serviceParams.isEmpty()) {
++                if (serviceParams.containsKey(name)) {
++                    Map<String, String> params = serviceParams.get(name);
++                    fw.write("      params:\n");
++                    for (String paramName : params.keySet()) {
++                        fw.write("            " + paramName + ": " + params.get(paramName) + "\n");
++                    }
++                }
++            }
++
++            // Service URLs
 +            List<String> urls = services.get(name);
 +            if (urls != null) {
 +                fw.write("      urls:\n");
 +                for (String url : urls) {
 +                    fw.write("          - " + url + "\n");
 +                }
 +            }
 +        }
 +        fw.flush();
 +        fw.close();
 +
 +        return f;
 +    }
 +
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/58780d37/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index b5558fd,0000000..a0c977a
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@@ -1,392 -1,0 +1,447 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.topology.simple;
 +
 +import org.apache.knox.gateway.topology.validation.TopologyValidator;
 +import org.apache.knox.gateway.util.XmlUtils;
 +import java.io.ByteArrayInputStream;
 +import java.io.File;
++import java.io.FileNotFoundException;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +
 +import java.util.ArrayList;
 +import java.util.Collections;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Properties;
 +
 +import javax.xml.xpath.XPath;
 +import javax.xml.xpath.XPathConstants;
 +import javax.xml.xpath.XPathFactory;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.easymock.EasyMock;
 +import org.junit.Test;
 +import org.w3c.dom.Document;
 +import org.w3c.dom.Node;
 +import org.w3c.dom.NodeList;
 +import org.xml.sax.SAXException;
 +
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertFalse;
 +import static org.junit.Assert.assertNotNull;
 +import static org.junit.Assert.assertTrue;
 +import static org.junit.Assert.fail;
 +
 +
 +public class SimpleDescriptorHandlerTest {
 +
 +    private static final String TEST_PROVIDER_CONFIG =
 +            "    <gateway>\n" +
 +                    "        <provider>\n" +
 +                    "            <role>authentication</role>\n" +
 +                    "            <name>ShiroProvider</name>\n" +
 +                    "            <enabled>true</enabled>\n" +
 +                    "            <param>\n" +
 +                    "                <!-- \n" +
 +                    "                session timeout in minutes,  this is really idle timeout,\n" +
 +                    "                defaults to 30mins, if the property value is not defined, \n" +
 +                    "                current client authentication would expire if client idles continuously for more than this value\n" +
 +                    "                -->\n" +
 +                    "                <name>sessionTimeout</name>\n" +
 +                    "                <value>30</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapRealm</name>\n" +
 +                    "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapContextFactory</name>\n" +
 +                    "                <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapRealm.contextFactory</name>\n" +
 +                    "                <value>$ldapContextFactory</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapRealm.userDnTemplate</name>\n" +
 +                    "                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapRealm.contextFactory.url</name>\n" +
 +                    "                <value>ldap://localhost:33389</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
 +                    "                <value>simple</value>\n" +
 +                    "            </param>\n" +
 +                    "            <param>\n" +
 +                    "                <name>urls./**</name>\n" +
 +                    "                <value>authcBasic</value>\n" +
 +                    "            </param>\n" +
 +                    "        </provider>\n" +
 +                    "\n" +
 +                    "        <provider>\n" +
 +                    "            <role>identity-assertion</role>\n" +
 +                    "            <name>Default</name>\n" +
 +                    "            <enabled>true</enabled>\n" +
 +                    "        </provider>\n" +
 +                    "\n" +
 +                    "        <!--\n" +
 +                    "        Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
 +                    "        For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
 +                    "        some AWS internal host name.  If the client needs to make a subsequent request to the host identified\n" +
 +                    "        in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
 +                    "\n" +
 +                    "        If the external hostname and internal host names are the same, turn off this provider by setting the value of\n" +
 +                    "        the enabled parameter as false.\n" +
 +                    "\n" +
 +                    "        The name parameter specifies the external host names in a comma separated list.\n" +
 +                    "        The value parameter specifies corresponding internal host names in a comma separated list.\n" +
 +                    "\n" +
 +                    "        Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
 +                    "        of box sandbox.xml.  This is because Sandbox uses port mapping to allow clients to connect to the\n" +
 +                    "        Hadoop services using localhost.  In real clusters, external host names would almost never be localhost.\n" +
 +                    "        -->\n" +
 +                    "        <provider>\n" +
 +                    "            <role>hostmap</role>\n" +
 +                    "            <name>static</name>\n" +
 +                    "            <enabled>true</enabled>\n" +
 +                    "            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
 +                    "        </provider>\n" +
 +                    "    </gateway>\n";
 +
 +
 +    /**
 +     * KNOX-1006
 +     *
-      * N.B. This test depends on the DummyServiceDiscovery extension being configured:
-      *             org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
++     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
++     *             org.apache.knox.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
 +     */
 +    @Test
 +    public void testSimpleDescriptorHandler() throws Exception {
 +
-         final String type = "DUMMY";
-         final String address = "http://c6401.ambari.apache.org:8080";
++        final String type = "PROPERTIES_FILE";
 +        final String clusterName = "dummy";
++
++        // Create a properties file to be the source of service discovery details for this test
++        final File discoveryConfig = File.createTempFile(getClass().getName() + "_discovery-config", ".properties");
++
++        final String address = discoveryConfig.getAbsolutePath();
++
++        final Properties DISCOVERY_PROPERTIES = new Properties();
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".name", clusterName);
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".NAMENODE", "hdfs://namenodehost:8020");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".JOBTRACKER", "rpc://jobtrackerhostname:8050");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHDFS", "http://webhdfshost:1234");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHCAT", "http://webhcathost:50111/templeton");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".OOZIE", "http://ooziehost:11000/oozie");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".WEBHBASE", "http://webhbasehost:1234");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".HIVE", "http://hivehostname:10001/clipath");
++        DISCOVERY_PROPERTIES.setProperty(clusterName + ".RESOURCEMANAGER", "http://remanhost:8088/ws");
++
++        try {
++            DISCOVERY_PROPERTIES.store(new FileOutputStream(discoveryConfig), null);
++        } catch (FileNotFoundException e) {
++            fail(e.getMessage());
++        }
++
 +        final Map<String, List<String>> serviceURLs = new HashMap<>();
 +        serviceURLs.put("NAMENODE", null);
 +        serviceURLs.put("JOBTRACKER", null);
 +        serviceURLs.put("WEBHDFS", null);
 +        serviceURLs.put("WEBHCAT", null);
 +        serviceURLs.put("OOZIE", null);
 +        serviceURLs.put("WEBHBASE", null);
 +        serviceURLs.put("HIVE", null);
 +        serviceURLs.put("RESOURCEMANAGER", null);
 +        serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
++        serviceURLs.put("KNOXSSO", null);
 +
 +        // Write the externalized provider config to a temp file
-         File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
++        File providerConfig = new File(System.getProperty("java.io.tmpdir"), "ambari-cluster-policy.xml");
++        FileUtils.write(providerConfig, TEST_PROVIDER_CONFIG);
 +
 +        File topologyFile = null;
 +        try {
-             File destDir = (new File(".")).getCanonicalFile();
++            File destDir = new File(System.getProperty("java.io.tmpdir")).getCanonicalFile();
++
++            Map<String, Map<String, String>> serviceParameters = new HashMap<>();
++            Map<String, String> knoxssoParams = new HashMap<>();
++            knoxssoParams.put("knoxsso.cookie.secure.only", "true");
++            knoxssoParams.put("knoxsso.token.ttl", "100000");
++            serviceParameters.put("KNOXSSO", knoxssoParams);
 +
 +            // Mock out the simple descriptor
 +            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
 +            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
 +            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
 +            EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
 +            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
 +            for (String serviceName : serviceURLs.keySet()) {
 +                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
 +                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
 +                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
++                EasyMock.expect(svc.getParams()).andReturn(serviceParameters.get(serviceName)).anyTimes();
 +                EasyMock.replay(svc);
 +                serviceMocks.add(svc);
 +            }
 +            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
 +            EasyMock.replay(testDescriptor);
 +
 +            // Invoke the simple descriptor handler
 +            Map<String, File> files =
 +                           SimpleDescriptorHandler.handle(testDescriptor,
 +                                                          providerConfig.getParentFile(), // simple desc co-located with provider config
 +                                                          destDir);
 +            topologyFile = files.get("topology");
 +
 +            // Validate the resulting topology descriptor
 +            assertTrue(topologyFile.exists());
 +
 +            // Validate the topology descriptor's correctness
 +            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
 +            if( !validator.validateTopology() ){
 +                throw new SAXException( validator.getErrorString() );
 +            }
 +
 +            XPathFactory xPathfactory = XPathFactory.newInstance();
 +            XPath xpath = xPathfactory.newXPath();
 +
 +            // Parse the topology descriptor
 +            Document topologyXml = XmlUtils.readXml(topologyFile);
 +
 +            // Validate the provider configuration
 +            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
 +            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
 +            assertTrue("Resulting provider config should be identical to the referenced content.",
 +                       extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
 +
 +            // Validate the service declarations
 +            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
 +            NodeList serviceNodes =
 +                        (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
 +            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
 +                Node serviceNode = serviceNodes.item(serviceNodeIndex);
++
++                // Validate the role
 +                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
 +                assertNotNull(roleNode);
 +                String role = roleNode.getNodeValue();
++
++                // Validate the URLs
 +                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
 +                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
 +                    Node urlNode = urlNodes.item(urlNodeIndex);
 +                    assertNotNull(urlNode);
 +                    String url = urlNode.getNodeValue();
-                     assertNotNull("Every declared service should have a URL.", url);
-                     if (!topologyServiceURLs.containsKey(role)) {
-                         topologyServiceURLs.put(role, new ArrayList<String>());
++
++                    // If the service should have a URL (some don't require it)
++                    if (serviceURLs.containsKey(role)) {
++                        assertNotNull("Declared service should have a URL.", url);
++                        if (!topologyServiceURLs.containsKey(role)) {
++                            topologyServiceURLs.put(role, new ArrayList<>());
++                        }
++                        topologyServiceURLs.get(role).add(url); // Add it for validation later
 +                    }
-                     topologyServiceURLs.get(role).add(url);
 +                }
++
++                // If params were declared in the descriptor, then validate them in the resulting topology file
++                Map<String, String> params = serviceParameters.get(role);
++                if (params != null) {
++                    NodeList paramNodes = (NodeList) xpath.compile("param").evaluate(serviceNode, XPathConstants.NODESET);
++                    for (int paramNodeIndex = 0; paramNodeIndex < paramNodes.getLength(); paramNodeIndex++) {
++                        Node paramNode = paramNodes.item(paramNodeIndex);
++                        String paramName = (String) xpath.compile("name/text()").evaluate(paramNode, XPathConstants.STRING);
++                        String paramValue = (String) xpath.compile("value/text()").evaluate(paramNode, XPathConstants.STRING);
++                        assertTrue(params.keySet().contains(paramName));
++                        assertEquals(params.get(paramName), paramValue);
++                    }
++                }
++
 +            }
-             assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
++            assertEquals("Unexpected number of service declarations.", (serviceURLs.size() - 1), topologyServiceURLs.size());
 +
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            fail(e.getMessage());
 +        } finally {
 +            providerConfig.delete();
++            discoveryConfig.delete();
 +            if (topologyFile != null) {
 +                topologyFile.delete();
 +            }
 +        }
 +    }
 +
 +
 +    /**
 +     * KNOX-1006
 +     *
 +     * Verify the behavior of the SimpleDescriptorHandler when service discovery fails to produce a valid URL for
 +     * a service.
 +     *
 +     * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
 +     *             org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
 +     */
 +    @Test
 +    public void testInvalidServiceURLFromDiscovery() throws Exception {
 +        final String CLUSTER_NAME = "myproperties";
 +
 +        // Configure the PropertiesFile Service Discovery implementation for this test
 +        final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
 +        Properties serviceDiscoverySourceProps = new Properties();
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
 +                                                DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
 +                                                DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS",         DEFAULT_VALID_SERVICE_URL);
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT",         DEFAULT_VALID_SERVICE_URL);
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE",           DEFAULT_VALID_SERVICE_URL);
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE",        DEFAULT_VALID_SERVICE_URL);
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE",            "{SCHEME}://localhost:10000/");
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
 +        serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI",        DEFAULT_VALID_SERVICE_URL);
 +        File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
 +        serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
 +                                          "Test Service Discovery Source");
 +
 +        // Prepare a mock SimpleDescriptor
 +        final String type = "PROPERTIES_FILE";
 +        final String address = serviceDiscoverySource.getAbsolutePath();
 +        final Map<String, List<String>> serviceURLs = new HashMap<>();
 +        serviceURLs.put("NAMENODE", null);
 +        serviceURLs.put("JOBTRACKER", null);
 +        serviceURLs.put("WEBHDFS", null);
 +        serviceURLs.put("WEBHCAT", null);
 +        serviceURLs.put("OOZIE", null);
 +        serviceURLs.put("WEBHBASE", null);
 +        serviceURLs.put("HIVE", null);
 +        serviceURLs.put("RESOURCEMANAGER", null);
 +        serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
 +
 +        // Write the externalized provider config to a temp file
 +        File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
 +
 +        File topologyFile = null;
 +        try {
 +            File destDir = (new File(".")).getCanonicalFile();
 +
 +            // Mock out the simple descriptor
 +            SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
 +            EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
 +            EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
 +            EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
 +            EasyMock.expect(testDescriptor.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();
 +            List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
 +            for (String serviceName : serviceURLs.keySet()) {
 +                SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
 +                EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
 +                EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
 +                EasyMock.replay(svc);
 +                serviceMocks.add(svc);
 +            }
 +            EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
 +            EasyMock.replay(testDescriptor);
 +
 +            // Invoke the simple descriptor handler
 +            Map<String, File> files =
 +                    SimpleDescriptorHandler.handle(testDescriptor,
 +                                                   providerConfig.getParentFile(), // simple desc co-located with provider config
 +                                                   destDir);
 +
 +            topologyFile = files.get("topology");
 +
 +            // Validate the resulting topology descriptor
 +            assertTrue(topologyFile.exists());
 +
 +            // Validate the topology descriptor's correctness
 +            TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
 +            if( !validator.validateTopology() ){
 +                throw new SAXException( validator.getErrorString() );
 +            }
 +
 +            XPathFactory xPathfactory = XPathFactory.newInstance();
 +            XPath xpath = xPathfactory.newXPath();
 +
 +            // Parse the topology descriptor
 +            Document topologyXml = XmlUtils.readXml(topologyFile);
 +
 +            // Validate the provider configuration
 +            Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
 +            Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
 +            assertTrue("Resulting provider config should be identical to the referenced content.",
 +                    extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
 +
 +            // Validate the service declarations
 +            List<String> topologyServices = new ArrayList<>();
 +            Map<String, List<String>> topologyServiceURLs = new HashMap<>();
 +            NodeList serviceNodes =
 +                    (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
 +            for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
 +                Node serviceNode = serviceNodes.item(serviceNodeIndex);
 +                Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
 +                assertNotNull(roleNode);
 +                String role = roleNode.getNodeValue();
 +                topologyServices.add(role);
 +                NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
 +                for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
 +                    Node urlNode = urlNodes.item(urlNodeIndex);
 +                    assertNotNull(urlNode);
 +                    String url = urlNode.getNodeValue();
 +                    assertNotNull("Every declared service should have a URL.", url);
 +                    if (!topologyServiceURLs.containsKey(role)) {
-                         topologyServiceURLs.put(role, new ArrayList<String>());
++                        topologyServiceURLs.put(role, new ArrayList<>());
 +                    }
 +                    topologyServiceURLs.get(role).add(url);
 +                }
 +            }
 +
 +            // There should not be a service element for HIVE, since it had no valid URLs
 +            assertEquals("Unexpected number of service declarations.", serviceURLs.size() - 1, topologyServices.size());
 +            assertFalse("The HIVE service should have been omitted from the generated topology.", topologyServices.contains("HIVE"));
 +
 +            assertEquals("Unexpected number of service URLs.", serviceURLs.size() - 1, topologyServiceURLs.size());
 +
 +        } catch (Exception e) {
 +            e.printStackTrace();
 +            fail(e.getMessage());
 +        } finally {
 +            serviceDiscoverySource.delete();
 +            providerConfig.delete();
 +            if (topologyFile != null) {
 +                topologyFile.delete();
 +            }
 +        }
 +    }
 +
 +
 +    private File writeProviderConfig(String path, String content) throws IOException {
 +        File f = new File(path);
 +        FileUtils.write(f, content);
 +        return f;
 +    }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/58780d37/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
index a97cee2,0000000..a103dac
mode 100644,000000..100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
@@@ -1,322 -1,0 +1,334 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.service.knoxsso;
 +
 +import java.io.IOException;
 +import java.net.URI;
 +import java.net.URISyntaxException;
 +import java.security.Principal;
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +
 +import javax.annotation.PostConstruct;
 +import javax.servlet.ServletContext;
 +import javax.servlet.http.Cookie;
 +import javax.servlet.http.HttpServletRequest;
 +import javax.servlet.http.HttpServletResponse;
 +import javax.servlet.http.HttpSession;
 +import javax.ws.rs.GET;
 +import javax.ws.rs.POST;
 +import javax.ws.rs.Path;
 +import javax.ws.rs.Produces;
 +import javax.ws.rs.core.Context;
 +import javax.ws.rs.core.Response;
 +import javax.ws.rs.WebApplicationException;
 +
 +import org.apache.knox.gateway.i18n.messages.MessagesFactory;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
 +import org.apache.knox.gateway.services.security.token.TokenServiceException;
 +import org.apache.knox.gateway.services.security.token.impl.JWT;
 +import org.apache.knox.gateway.util.RegExUtils;
 +import org.apache.knox.gateway.util.Urls;
 +
 +import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
 +import static javax.ws.rs.core.MediaType.APPLICATION_XML;
 +
 +@Path( WebSSOResource.RESOURCE_PATH )
 +public class WebSSOResource {
 +  private static final String SSO_COOKIE_NAME = "knoxsso.cookie.name";
 +  private static final String SSO_COOKIE_SECURE_ONLY_INIT_PARAM = "knoxsso.cookie.secure.only";
 +  private static final String SSO_COOKIE_MAX_AGE_INIT_PARAM = "knoxsso.cookie.max.age";
 +  private static final String SSO_COOKIE_DOMAIN_SUFFIX_PARAM = "knoxsso.cookie.domain.suffix";
 +  private static final String SSO_COOKIE_TOKEN_TTL_PARAM = "knoxsso.token.ttl";
 +  private static final String SSO_COOKIE_TOKEN_AUDIENCES_PARAM = "knoxsso.token.audiences";
++  private static final String SSO_COOKIE_TOKEN_SIG_ALG = "knoxsso.token.sigalg";
 +  private static final String SSO_COOKIE_TOKEN_WHITELIST_PARAM = "knoxsso.redirect.whitelist.regex";
 +  private static final String SSO_ENABLE_SESSION_PARAM = "knoxsso.enable.session";
 +  private static final String ORIGINAL_URL_REQUEST_PARAM = "originalUrl";
 +  private static final String ORIGINAL_URL_COOKIE_NAME = "original-url";
 +  private static final String DEFAULT_SSO_COOKIE_NAME = "hadoop-jwt";
 +  // default for the whitelist - open up for development - relative paths and localhost only
 +  private static final String DEFAULT_WHITELIST = "^/.*$;^https?://(localhost|127.0.0.1|0:0:0:0:0:0:0:1|::1):\\d{0,9}/.*$";
++  private static final long TOKEN_TTL_DEFAULT = 30000L;
 +  static final String RESOURCE_PATH = "/api/v1/websso";
 +  private static KnoxSSOMessages log = MessagesFactory.get( KnoxSSOMessages.class );
 +  private String cookieName = null;
 +  private boolean secureOnly = true;
 +  private int maxAge = -1;
-   private long tokenTTL = 30000l;
++  private long tokenTTL = TOKEN_TTL_DEFAULT;
 +  private String whitelist = null;
 +  private String domainSuffix = null;
 +  private List<String> targetAudiences = new ArrayList<>();
 +  private boolean enableSession = false;
++  private String signatureAlgorithm = "RS256";
 +
 +  @Context
 +  HttpServletRequest request;
 +
 +  @Context
 +  HttpServletResponse response;
 +
 +  @Context
 +  ServletContext context;
 +
 +  @PostConstruct
 +  public void init() {
 +
 +    // configured cookieName
 +    cookieName = context.getInitParameter(SSO_COOKIE_NAME);
 +    if (cookieName == null) {
 +      cookieName = DEFAULT_SSO_COOKIE_NAME;
 +    }
 +
 +    String secure = context.getInitParameter(SSO_COOKIE_SECURE_ONLY_INIT_PARAM);
 +    if (secure != null) {
 +      secureOnly = ("false".equals(secure) ? false : true);
 +      if (!secureOnly) {
 +        log.cookieSecureOnly(secureOnly);
 +      }
 +    }
 +
 +    String age = context.getInitParameter(SSO_COOKIE_MAX_AGE_INIT_PARAM);
 +    if (age != null) {
 +      try {
 +        log.setMaxAge(age);
 +        maxAge = Integer.parseInt(age);
 +      }
 +      catch (NumberFormatException nfe) {
 +        log.invalidMaxAgeEncountered(age);
 +      }
 +    }
 +
 +    domainSuffix = context.getInitParameter(SSO_COOKIE_DOMAIN_SUFFIX_PARAM);
 +
 +    whitelist = context.getInitParameter(SSO_COOKIE_TOKEN_WHITELIST_PARAM);
 +    if (whitelist == null) {
 +      // default to local/relative targets
 +      whitelist = DEFAULT_WHITELIST;
 +    }
 +
 +    String audiences = context.getInitParameter(SSO_COOKIE_TOKEN_AUDIENCES_PARAM);
 +    if (audiences != null) {
 +      String[] auds = audiences.split(",");
 +      for (int i = 0; i < auds.length; i++) {
 +        targetAudiences.add(auds[i].trim());
 +      }
 +    }
 +
 +    String ttl = context.getInitParameter(SSO_COOKIE_TOKEN_TTL_PARAM);
 +    if (ttl != null) {
 +      try {
 +        tokenTTL = Long.parseLong(ttl);
++        if (tokenTTL < -1 || (tokenTTL + System.currentTimeMillis() < 0)) {
++          log.invalidTokenTTLEncountered(ttl);
++          tokenTTL = TOKEN_TTL_DEFAULT;
++        }
 +      }
 +      catch (NumberFormatException nfe) {
 +        log.invalidTokenTTLEncountered(ttl);
 +      }
 +    }
 +
 +    String enableSession = context.getInitParameter(SSO_ENABLE_SESSION_PARAM);
 +    this.enableSession = ("true".equals(enableSession));
++
++    String sigAlg = context.getInitParameter(SSO_COOKIE_TOKEN_SIG_ALG);
++    if (sigAlg != null) {
++      signatureAlgorithm = sigAlg;
++    }
 +  }
 +
 +  @GET
 +  @Produces({APPLICATION_JSON, APPLICATION_XML})
 +  public Response doGet() {
 +    return getAuthenticationToken(HttpServletResponse.SC_TEMPORARY_REDIRECT);
 +  }
 +
 +  @POST
 +  @Produces({APPLICATION_JSON, APPLICATION_XML})
 +  public Response doPost() {
 +    return getAuthenticationToken(HttpServletResponse.SC_SEE_OTHER);
 +  }
 +
 +  private Response getAuthenticationToken(int statusCode) {
 +    GatewayServices services = (GatewayServices) request.getServletContext()
 +            .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
 +    boolean removeOriginalUrlCookie = true;
 +    String original = getCookieValue((HttpServletRequest) request, ORIGINAL_URL_COOKIE_NAME);
 +    if (original == null) {
 +      // in the case where there are no SAML redirects done before here
 +      // we need to get it from the request parameters
 +      removeOriginalUrlCookie = false;
 +      original = getOriginalUrlFromQueryParams();
 +      if (original.isEmpty()) {
 +        log.originalURLNotFound();
 +        throw new WebApplicationException("Original URL not found in the request.", Response.Status.BAD_REQUEST);
 +      }
 +      boolean validRedirect = RegExUtils.checkWhitelist(whitelist, original);
 +      if (!validRedirect) {
 +        log.whiteListMatchFail(original, whitelist);
 +        throw new WebApplicationException("Original URL not valid according to the configured whitelist.",
 +                Response.Status.BAD_REQUEST);
 +      }
 +    }
 +
 +    JWTokenAuthority ts = services.getService(GatewayServices.TOKEN_SERVICE);
 +    Principal p = ((HttpServletRequest)request).getUserPrincipal();
 +
 +    try {
 +      JWT token = null;
 +      if (targetAudiences.isEmpty()) {
-         token = ts.issueToken(p, "RS256", getExpiry());
++        token = ts.issueToken(p, signatureAlgorithm, getExpiry());
 +      } else {
-         token = ts.issueToken(p, targetAudiences, "RS256", getExpiry());
++        token = ts.issueToken(p, targetAudiences, signatureAlgorithm, getExpiry());
 +      }
 +
 +      // Coverity CID 1327959
 +      if( token != null ) {
 +        addJWTHadoopCookie( original, token );
 +      }
 +
 +      if (removeOriginalUrlCookie) {
 +        removeOriginalUrlCookie(response);
 +      }
 +
 +      log.aboutToRedirectToOriginal(original);
 +      response.setStatus(statusCode);
 +      response.setHeader("Location", original);
 +      try {
 +        response.getOutputStream().close();
 +      } catch (IOException e) {
 +        log.unableToCloseOutputStream(e.getMessage(), Arrays.toString(e.getStackTrace()));
 +      }
 +    }
 +    catch (TokenServiceException e) {
 +      log.unableToIssueToken(e);
 +    }
 +    URI location = null;
 +    try {
 +      location = new URI(original);
 +    }
 +    catch(URISyntaxException urise) {
 +      // todo log return error response
 +    }
 +
 +    if (!enableSession) {
 +      // invalidate the session to avoid autologin
 +      // Coverity CID 1352857
 +      HttpSession session = request.getSession(false);
 +      if( session != null ) {
 +        session.invalidate();
 +      }
 +    }
 +
 +    return Response.seeOther(location).entity("{ \"redirectTo\" : " + original + " }").build();
 +  }
 +
 +  private String getOriginalUrlFromQueryParams() {
 +    String original = request.getParameter(ORIGINAL_URL_REQUEST_PARAM);
 +    StringBuffer buf = new StringBuffer(original);
 +
 +    // Add any other query params.
 +    // Probably not ideal but will not break existing integrations by requiring
 +    // some encoding.
 +    Map<String, String[]> params = request.getParameterMap();
 +    for (Entry<String, String[]> entry : params.entrySet()) {
 +      if (!ORIGINAL_URL_REQUEST_PARAM.equals(entry.getKey())
 +          && !original.contains(entry.getKey() + "=")) {
 +        buf.append("&").append(entry.getKey());
 +        String[] values = entry.getValue();
 +        if (values.length > 0 && values[0] != null) {
 +          buf.append("=");
 +        }
 +        for (int i = 0; i < values.length; i++) {
 +          if (values[0] != null) {
 +            buf.append(values[i]);
 +            if (i < values.length-1) {
 +              buf.append("&").append(entry.getKey()).append("=");
 +            }
 +          }
 +        }
 +      }
 +    }
 +
 +    return buf.toString();
 +  }
 +
 +  private long getExpiry() {
 +    long expiry = 0l;
 +    if (tokenTTL == -1) {
 +      expiry = -1;
 +    }
 +    else {
 +      expiry = System.currentTimeMillis() + tokenTTL;
 +    }
 +    return expiry;
 +  }
 +
 +  private void addJWTHadoopCookie(String original, JWT token) {
 +    log.addingJWTCookie(token.toString());
 +    Cookie c = new Cookie(cookieName,  token.toString());
 +    c.setPath("/");
 +    try {
 +      String domain = Urls.getDomainName(original, domainSuffix);
 +      if (domain != null) {
 +        c.setDomain(domain);
 +      }
 +      c.setHttpOnly(true);
 +      if (secureOnly) {
 +        c.setSecure(true);
 +      }
 +      if (maxAge != -1) {
 +        c.setMaxAge(maxAge);
 +      }
 +      response.addCookie(c);
 +      log.addedJWTCookie();
 +    }
 +    catch(Exception e) {
 +      log.unableAddCookieToResponse(e.getMessage(), Arrays.toString(e.getStackTrace()));
 +      throw new WebApplicationException("Unable to add JWT cookie to response.");
 +    }
 +  }
 +
 +  private void removeOriginalUrlCookie(HttpServletResponse response) {
 +    Cookie c = new Cookie(ORIGINAL_URL_COOKIE_NAME, null);
 +    c.setMaxAge(0);
 +    c.setPath(RESOURCE_PATH);
 +    response.addCookie(c);
 +  }
 +
 +  private String getCookieValue(HttpServletRequest request, String name) {
 +    Cookie[] cookies = request.getCookies();
 +    String value = null;
 +    if (cookies != null) {
 +      for(Cookie cookie : cookies){
 +        if(name.equals(cookie.getName())){
 +          value = cookie.getValue();
 +        }
 +      }
 +    }
 +    if (value == null) {
 +      log.cookieNotFound(name);
 +    }
 +    return value;
 +  }
 +}