You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by mo...@apache.org on 2017/11/02 18:48:05 UTC
[01/25] knox git commit: KNOX-1049 - Default Service or App Context
for Topologies
Repository: knox
Updated Branches:
refs/heads/KNOX-998-Package_Restructuring 9577842b1 -> 1451428f7
KNOX-1049 - Default Service or App Context for Topologies
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/3a411555
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/3a411555
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/3a411555
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 3a411555fa9a89965281604c4e56709325abf743
Parents: a5a8825
Author: Larry McCay <lm...@hortonworks.com>
Authored: Tue Sep 19 20:00:24 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Tue Sep 19 20:00:24 2017 -0400
----------------------------------------------------------------------
.../apache/hadoop/gateway/GatewayFilter.java | 64 +++++++++++++++++++-
.../builder/BeanPropertyTopologyBuilder.java | 11 ++++
.../xml/KnoxFormatXmlTopologyRules.java | 2 +
.../src/main/resources/conf/topology-v1.xsd | 1 +
.../service/admin/TopologiesResource.java | 11 ++++
.../service/admin/beans/BeanConverter.java | 2 +
.../gateway/service/admin/beans/Topology.java | 11 ++++
.../hadoop/gateway/topology/Topology.java | 9 +++
.../gateway/topology/topology_binding-xml.xml | 3 +-
9 files changed, 112 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
index c92ebfe..2885fe3 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
+import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.util.urltemplate.Matcher;
import org.apache.hadoop.gateway.util.urltemplate.Parser;
import org.apache.hadoop.gateway.util.urltemplate.Template;
@@ -43,6 +44,7 @@ import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@@ -122,7 +124,35 @@ public class GatewayFilter implements Filter {
AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME, contextWithPathAndQuery );
Matcher<Chain>.Match match = chains.match( pathWithQueryTemplate );
-
+
+ // if there was no match then look for a default service for the topology
+ if (match == null) {
+ Topology topology = (Topology) servletRequest.getServletContext().getAttribute("org.apache.hadoop.gateway.topology");
+ String defaultServicePath = topology.getDefaultServicePath();
+ if (defaultServicePath != null) {
+ try {
+ String newPathWithQuery = defaultServicePath + "/" + pathWithQueryTemplate;
+ match = chains.match(Parser.parseLiteral(newPathWithQuery));
+ String origUrl = ((HttpServletRequest) servletRequest).getRequestURL().toString();
+ String url = origUrl;
+ if (path.equals("/")) {
+ url += defaultServicePath;
+ }
+ else {
+ int index = origUrl.indexOf(path);
+ url = origUrl.substring(0, index) + "/" + defaultServicePath + path;
+ }
+ String contextPath = defaultServicePath;
+ servletRequest = new ForwardedRequest((HttpServletRequest) servletRequest,
+ contextPath,
+ url);
+ } catch (URISyntaxException e) {
+ throw new ServletException( e );
+ }
+// ((HttpServletRequest) servletRequest).getRequestURL();
+ }
+ }
+
assignCorrelationRequestId();
// Populate Audit/correlation parameters
AuditContext auditContext = auditService.getContext();
@@ -387,4 +417,36 @@ public class GatewayFilter implements Filter {
}
+ /**
+ * A request wrapper class that wraps a request and adds the context path if
+ * needed.
+ */
+ static class ForwardedRequest extends HttpServletRequestWrapper {
+
+ private String newURL;
+ private String contextpath;
+
+ public ForwardedRequest(final HttpServletRequest request,
+ final String contextpath, final String newURL) {
+ super(request);
+ this.newURL = newURL;
+ this.contextpath = contextpath;
+ }
+
+ @Override
+ public StringBuffer getRequestURL() {
+ return new StringBuffer(newURL);
+ }
+
+ @Override
+ public String getRequestURI() {
+ return newURL;
+ }
+
+ @Override
+ public String getContextPath() {
+ return super.getContextPath() + "/" + this.contextpath;
+ }
+
+ }
}
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b5e80d2..b33e52c 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.gateway.topology.Topology;
public class BeanPropertyTopologyBuilder implements TopologyBuilder {
private String name;
+ private String defaultService;
private List<Provider> providers;
private List<Service> services;
private List<Application> applications;
@@ -46,6 +47,15 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
return name;
}
+ public BeanPropertyTopologyBuilder defaultService(String defaultService) {
+ this.defaultService = defaultService;
+ return this;
+ }
+
+ public String defaultService() {
+ return defaultService;
+ }
+
public BeanPropertyTopologyBuilder addProvider(Provider provider) {
providers.add(provider);
return this;
@@ -76,6 +86,7 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
public Topology build() {
Topology topology = new Topology();
topology.setName(name);
+ topology.setDefaultServicePath(defaultService);
for (Provider provider : providers) {
topology.addProvider(provider);
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index e573d63..e221507 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@ -32,6 +32,7 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
private static final String ROOT_TAG = "topology";
private static final String NAME_TAG = "name";
private static final String VERSION_TAG = "version";
+ private static final String DEFAULT_SERVICE_TAG = "default-service";
private static final String APPLICATION_TAG = "application";
private static final String SERVICE_TAG = "service";
private static final String ROLE_TAG = "role";
@@ -48,6 +49,7 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
forPattern( ROOT_TAG ).createObject().ofType( BeanPropertyTopologyBuilder.class );
forPattern( ROOT_TAG + "/" + NAME_TAG ).callMethod("name").usingElementBodyAsArgument();
forPattern( ROOT_TAG + "/" + VERSION_TAG ).callMethod("version").usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + DEFAULT_SERVICE_TAG ).callMethod("defaultService").usingElementBodyAsArgument();
forPattern( ROOT_TAG + "/" + APPLICATION_TAG ).createObject().ofType( Application.class ).then().setNext( "addApplication" );
forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + ROLE_TAG ).setBeanProperty();
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-server/src/main/resources/conf/topology-v1.xsd
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/conf/topology-v1.xsd b/gateway-server/src/main/resources/conf/topology-v1.xsd
index 9e3bed6..ab07caa 100644
--- a/gateway-server/src/main/resources/conf/topology-v1.xsd
+++ b/gateway-server/src/main/resources/conf/topology-v1.xsd
@@ -21,6 +21,7 @@ limitations under the License.
<h:sequence maxOccurs="1">
<h:element name="name" minOccurs="0" maxOccurs="1"/>
+ <h:element name="default-service" minOccurs="0" maxOccurs="1"/>
<h:element name="gateway" minOccurs="0" maxOccurs="1">
<h:complexType>
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
index 1504eca..4748047 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
@@ -245,6 +245,8 @@ public class TopologiesResource {
@XmlElement
private String timestamp;
@XmlElement
+ private String defaultServicePath;
+ @XmlElement
private String uri;
@XmlElement
private String href;
@@ -254,6 +256,7 @@ public class TopologiesResource {
public SimpleTopology(org.apache.hadoop.gateway.topology.Topology t, String uri, String href) {
this.name = t.getName();
this.timestamp = Long.toString(t.getTimestamp());
+ this.defaultServicePath = t.getDefaultServicePath();
this.uri = uri;
this.href = href;
}
@@ -270,6 +273,14 @@ public class TopologiesResource {
return timestamp;
}
+ public void setDefaultService(String defaultServicePath) {
+ this.defaultServicePath = defaultServicePath;
+ }
+
+ public String getDefaultService() {
+ return defaultServicePath;
+ }
+
public void setTimestamp(String timestamp) {
this.timestamp = timestamp;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
index e0c9d06..ac3b19c 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
@@ -27,6 +27,7 @@ public class BeanConverter {
Topology topologyResource = new Topology();
topologyResource.setName(topology.getName());
topologyResource.setTimestamp(topology.getTimestamp());
+ topologyResource.setDefaultService(topology.getDefaultServicePath());
topologyResource.setUri(topology.getUri());
for ( org.apache.hadoop.gateway.topology.Provider provider : topology.getProviders() ) {
topologyResource.getProviders().add( getProvider(provider) );
@@ -44,6 +45,7 @@ public class BeanConverter {
org.apache.hadoop.gateway.topology.Topology deploymentTopology = new org.apache.hadoop.gateway.topology.Topology();
deploymentTopology.setName(topology.getName());
deploymentTopology.setTimestamp(topology.getTimestamp());
+ deploymentTopology.setDefaultServicePath(topology.getDefaultService());
deploymentTopology.setUri(topology.getUri());
for ( Provider provider : topology.getProviders() ) {
deploymentTopology.addProvider( getProvider(provider) );
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
index 6e85b7a..8bc5fa7 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
@@ -34,6 +34,9 @@ public class Topology {
private String name;
@XmlElement
+ private String defaultServicePath;
+
+ @XmlElement
private long timestamp;
@XmlElement(name="provider")
@@ -69,6 +72,14 @@ public class Topology {
return timestamp;
}
+ public void setDefaultService( String defaultServicePath ) {
+ this.defaultServicePath = defaultServicePath;
+ }
+
+ public String getDefaultService() {
+ return defaultServicePath;
+ }
+
public void setTimestamp( long timestamp ) {
this.timestamp = timestamp;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
index 7f37336..c366421 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
@@ -31,6 +31,7 @@ public class Topology {
private URI uri;
private String name;
+ private String defaultServicePath = null;
private long timestamp;
public List<Provider> providerList = new ArrayList<Provider>();
private Map<String,Map<String,Provider>> providerMap = new HashMap<>();
@@ -67,6 +68,14 @@ public class Topology {
this.timestamp = timestamp;
}
+ public String getDefaultServicePath() {
+ return defaultServicePath;
+ }
+
+ public void setDefaultServicePath(String servicePath) {
+ defaultServicePath = servicePath;
+ }
+
public Collection<Service> getServices() {
return services;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/3a411555/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
index 50d8d58..8c54ed7 100644
--- a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
+++ b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
@@ -24,10 +24,11 @@ or more contributor license agreements. See the NOTICE file
element-form-default="QUALIFIED"/>
<java-types>
<java-type name="Topology" xml-accessor-type="NONE">
- <xml-type prop-order="name providers services applications"/>
+ <xml-type prop-order="name defaultServicePath providers services applications"/>
<xml-root-element/>
<java-attributes>
<xml-element java-attribute="name" name="name"/>
+ <xml-element java-attribute="defaultServicePath" name="default-service"/>
<xml-elements java-attribute="providers">
<xml-element name="provider"/>
<xml-element-wrapper name="gateway"/>
[11/25] knox git commit: KNOX-1076 - Update nimbus-jose-jwt to 4.41.2
Posted by mo...@apache.org.
KNOX-1076 - Update nimbus-jose-jwt to 4.41.2
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/62a23feb
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/62a23feb
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/62a23feb
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 62a23febbe6f1bc4aaade9d1fa0540cea5a839f3
Parents: 9ad9bcd
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Fri Oct 27 15:45:43 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Fri Oct 27 15:45:43 2017 +0100
----------------------------------------------------------------------
pom.xml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/62a23feb/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 58a4122..d97548b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -739,7 +739,7 @@
<dependency>
<groupId>com.nimbusds</groupId>
<artifactId>nimbus-jose-jwt</artifactId>
- <version>4.34.2</version>
+ <version>4.41.2</version>
<scope>compile</scope>
<exclusions>
<exclusion>
@@ -1281,7 +1281,7 @@
<dependency>
<groupId>org.easymock</groupId>
<artifactId>easymock</artifactId>
- <version>3.0</version>
+ <version>3.5</version>
<scope>test</scope>
</dependency>
@@ -1315,12 +1315,12 @@
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path</artifactId>
- <version>0.9.1</version>
+ <version>2.4.0</version>
</dependency>
<dependency>
<groupId>com.jayway.jsonpath</groupId>
<artifactId>json-path-assert</artifactId>
- <version>0.9.1</version>
+ <version>2.4.0</version>
<scope>test</scope>
</dependency>
[12/25] knox git commit: KNOX-1049 - add unit test to
gatewayfiltertest
Posted by mo...@apache.org.
KNOX-1049 - add unit test to gatewayfiltertest
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/485520df
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/485520df
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/485520df
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 485520df21c94339066b53af35de13fadc5c632c
Parents: c211d05
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sun Oct 29 15:47:01 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sun Oct 29 15:47:01 2017 -0400
----------------------------------------------------------------------
.../hadoop/gateway/GatewayFilterTest.java | 49 ++++++++++++++++++++
1 file changed, 49 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/485520df/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayFilterTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayFilterTest.java
index 4e1562c..eabcf74 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayFilterTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayFilterTest.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.gateway;
import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
+import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.test.category.FastTests;
import org.apache.hadoop.test.category.UnitTests;
import org.easymock.EasyMock;
@@ -123,10 +124,17 @@ public class GatewayFilterTest {
public static class TestRoleFilter extends AbstractGatewayFilter {
public Object role;
+ public String defaultServicePath;
+ public String url;
@Override
protected void doFilter( HttpServletRequest request, HttpServletResponse response, FilterChain chain ) throws IOException, ServletException {
this.role = request.getAttribute( AbstractGatewayFilter.TARGET_SERVICE_ROLE );
+ Topology topology = (Topology)request.getServletContext().getAttribute( "org.apache.hadoop.gateway.topology" );
+ if (topology != null) {
+ this.defaultServicePath = (String) topology.getDefaultServicePath();
+ url = new String(request.getRequestURL());
+ }
}
}
@@ -168,4 +176,45 @@ public class GatewayFilterTest {
}
+ @Test
+ public void testDefaultServicePathTopologyRequestAttribute() throws Exception {
+
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ Topology topology = EasyMock.createNiceMock( Topology.class );
+ topology.setDefaultServicePath("test-role/");
+ HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
+ ServletContext context = EasyMock.createNiceMock( ServletContext.class );
+ GatewayConfig gatewayConfig = EasyMock.createNiceMock( GatewayConfig.class );
+ EasyMock.expect( topology.getDefaultServicePath() ).andReturn( "test-role" ).anyTimes();
+ EasyMock.expect( request.getPathInfo() ).andReturn( "/test-path/test-resource" ).anyTimes();
+ EasyMock.expect( request.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect( context.getAttribute(
+ GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE)).andReturn(gatewayConfig).anyTimes();
+ EasyMock.expect(gatewayConfig.getHeaderNameForRemoteAddress()).andReturn(
+ "Custom-Forwarded-For").anyTimes();
+ EasyMock.expect( request.getRequestURL() ).andReturn( new StringBuffer("http://host:8443/gateway/sandbox/test-path/test-resource/") ).anyTimes();
+
+ EasyMock.expect( context.getAttribute( "org.apache.hadoop.gateway.topology" ) ).andReturn( topology ).anyTimes();
+ EasyMock.replay( request );
+ EasyMock.replay( context );
+ EasyMock.replay( topology );
+ EasyMock.replay( gatewayConfig );
+
+ HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
+ EasyMock.replay( response );
+
+ TestRoleFilter filter = new TestRoleFilter();
+
+ GatewayFilter gateway = new GatewayFilter();
+ gateway.addFilter( "test-role/**/**", "test-filter", filter, null, "test-role" );
+ gateway.init( config );
+ gateway.doFilter( request, response );
+ gateway.destroy();
+
+ assertThat( (String)filter.defaultServicePath, is( "test-role" ) );
+ assertThat( (String)filter.url, is("http://host:8443/gateway/sandbox/test-role/test-path/test-resource"));
+
+ }
}
[06/25] knox git commit: KNOX-1049 - change BeanConverter for path
element
Posted by mo...@apache.org.
KNOX-1049 - change BeanConverter for path element
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/f5490414
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/f5490414
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/f5490414
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: f5490414dfb23a0d9a46e96aa904a1cc8c04f3e4
Parents: 3346d99
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 26 10:21:39 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 26 10:21:39 2017 -0400
----------------------------------------------------------------------
.../apache/hadoop/gateway/service/admin/beans/BeanConverter.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/f5490414/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
index ac3b19c..f94dcad 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/BeanConverter.java
@@ -27,7 +27,7 @@ public class BeanConverter {
Topology topologyResource = new Topology();
topologyResource.setName(topology.getName());
topologyResource.setTimestamp(topology.getTimestamp());
- topologyResource.setDefaultService(topology.getDefaultServicePath());
+ topologyResource.setPath(topology.getDefaultServicePath());
topologyResource.setUri(topology.getUri());
for ( org.apache.hadoop.gateway.topology.Provider provider : topology.getProviders() ) {
topologyResource.getProviders().add( getProvider(provider) );
@@ -45,7 +45,7 @@ public class BeanConverter {
org.apache.hadoop.gateway.topology.Topology deploymentTopology = new org.apache.hadoop.gateway.topology.Topology();
deploymentTopology.setName(topology.getName());
deploymentTopology.setTimestamp(topology.getTimestamp());
- deploymentTopology.setDefaultServicePath(topology.getDefaultService());
+ deploymentTopology.setDefaultServicePath(topology.getPath());
deploymentTopology.setUri(topology.getUri());
for ( Provider provider : topology.getProviders() ) {
deploymentTopology.addProvider( getProvider(provider) );
[15/25] knox git commit: KNOX-1078 - Add option to preserve original
string when lookup fails in regex based identity assertion provider (Wei Han
via Sandeep More)
Posted by mo...@apache.org.
KNOX-1078 - Add option to preserve original string when lookup fails in regex based identity assertion provider (Wei Han via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/11ec78ad
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/11ec78ad
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/11ec78ad
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 11ec78adc7fced033b84eb7a7f29f816d8472714
Parents: b60322a
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 30 10:50:46 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 30 10:50:46 2017 -0400
----------------------------------------------------------------------
.../filter/RegexIdentityAssertionFilter.java | 4 +++-
.../regex/filter/RegexTemplate.java | 12 ++++++----
.../regex/filter/RegexTemplateTest.java | 23 +++++++++++++++++++-
3 files changed, 33 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/11ec78ad/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
index 209178b..b033699 100644
--- a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
+++ b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.gateway.security.principal.PrincipalMappingException;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.TreeMap;
+import java.lang.Boolean;
public class RegexIdentityAssertionFilter extends CommonIdentityAssertionFilter {
@@ -48,7 +49,8 @@ public class RegexIdentityAssertionFilter extends CommonIdentityAssertionFilter
output = "";
}
dict = loadDictionary( filterConfig.getInitParameter( "lookup" ) );
- template = new RegexTemplate( input, output, dict );
+ boolean useOriginalOnLookupFailure = Boolean.parseBoolean(filterConfig.getInitParameter("use.original.on.lookup.failure"));
+ template = new RegexTemplate( input, output, dict, useOriginalOnLookupFailure);
} catch ( PrincipalMappingException e ) {
throw new ServletException( e );
}
http://git-wip-us.apache.org/repos/asf/knox/blob/11ec78ad/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplate.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplate.java b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplate.java
index 0a9912d..340b637 100644
--- a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplate.java
+++ b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplate.java
@@ -29,15 +29,17 @@ public class RegexTemplate {
Pattern inputPattern;
String outputTemplate;
Map<String,String> lookupTable;
+ boolean useOriginalOnLookupFailure;
public RegexTemplate( String regex, String template ) {
- this( regex, template, null );
+ this( regex, template, null, false );
}
- public RegexTemplate( String regex, String template, Map<String,String> map ) {
+ public RegexTemplate( String regex, String template, Map<String,String> map, boolean useOriginalOnLookupFailure ) {
this.inputPattern = Pattern.compile( regex );
this.outputTemplate = template;
this.lookupTable = map;
+ this.useOriginalOnLookupFailure = useOriginalOnLookupFailure;
}
public String apply( String input ) {
@@ -52,6 +54,7 @@ public class RegexTemplate {
private String expandTemplate( Matcher inputMatcher, String output ) {
Matcher directMatcher = directPattern.matcher( output );
while( directMatcher.find() ) {
+ String lookupKey = null;
String lookupValue = null;
String lookupStr = directMatcher.group( 1 );
Matcher indirectMatcher = indirectPattern.matcher( lookupStr );
@@ -59,14 +62,15 @@ public class RegexTemplate {
lookupStr = indirectMatcher.group( 1 );
int lookupIndex = Integer.parseInt( lookupStr );
if( lookupTable != null ) {
- String lookupKey = inputMatcher.group( lookupIndex );
+ lookupKey = inputMatcher.group( lookupIndex );
lookupValue = lookupTable.get( lookupKey );
}
} else {
int lookupIndex = Integer.parseInt( lookupStr );
lookupValue = inputMatcher.group( lookupIndex );
}
- output = directMatcher.replaceFirst( lookupValue == null ? "" : lookupValue );
+ String replaceWith = this.useOriginalOnLookupFailure ? lookupKey : "" ;
+ output = directMatcher.replaceFirst( lookupValue == null ? replaceWith : lookupValue );
directMatcher = directPattern.matcher( output );
}
return output;
http://git-wip-us.apache.org/repos/asf/knox/blob/11ec78ad/gateway-provider-identity-assertion-regex/src/test/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplateTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-regex/src/test/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplateTest.java b/gateway-provider-identity-assertion-regex/src/test/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplateTest.java
index b32cd41..6e17b36 100644
--- a/gateway-provider-identity-assertion-regex/src/test/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplateTest.java
+++ b/gateway-provider-identity-assertion-regex/src/test/java/org/apache/hadoop/gateway/identityasserter/regex/filter/RegexTemplateTest.java
@@ -57,7 +57,7 @@ public class RegexTemplateTest {
String actual;
- template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map );
+ template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map, false );
actual = template.apply( "member@us.apache.org" );
assertThat( actual, is( "prefix_member:USA_suffix" ) );
@@ -69,4 +69,25 @@ public class RegexTemplateTest {
}
+ @Test
+ public void testLookupFailure() {
+
+ RegexTemplate template;
+ Map<String,String> map = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ map.put( "us", "USA" );
+ map.put( "ca", "CANADA" );
+
+ String actual;
+
+ template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map, true );
+ actual = template.apply( "member@us.apache.org" );
+ assertThat( actual, is( "prefix_member:USA_suffix" ) );
+
+ actual = template.apply( "member@ca.apache.org" );
+ assertThat( actual, is( "prefix_member:CANADA_suffix" ) );
+
+ actual = template.apply( "member@nj.apache.org" );
+ assertThat( actual, is( "prefix_member:nj_suffix" ) );
+
+ }
}
[24/25] knox git commit: KNOX-998 - Some more refactoring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletRequest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletRequest.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletRequest.java
deleted file mode 100644
index 82ebe3d..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletRequest.java
+++ /dev/null
@@ -1,410 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import javax.servlet.AsyncContext;
-import javax.servlet.DispatcherType;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletInputStream;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-import javax.servlet.http.HttpUpgradeHandler;
-import javax.servlet.http.Part;
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.UnsupportedEncodingException;
-import java.security.Principal;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.Locale;
-import java.util.Map;
-
-public class MockHttpServletRequest implements HttpServletRequest {
-
- private String queryString;
- private String contentType;
- private String characterEncoding;
- private ServletInputStream inputStream;
- private String method = "GET";
-
- @Override
- public String getAuthType() {
- return null;
- }
-
- @Override
- public Cookie[] getCookies() {
- return new Cookie[ 0 ];
- }
-
- @Override
- public long getDateHeader( String s ) {
- return 0;
- }
-
- @Override
- public String getHeader( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getHeaders( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getHeaderNames() {
- return null;
- }
-
- @Override
- public int getIntHeader( String s ) {
- return 0;
- }
-
- @Override
- public String getMethod() {
- return method;
- }
-
- public void setMethod( String method ) {
- this.method = method;
- }
-
- @Override
- public String getPathInfo() {
- return null;
- }
-
- @Override
- public String getPathTranslated() {
- return null;
- }
-
- @Override
- public String getContextPath() {
- return null;
- }
-
- @Override
- public String getQueryString() {
- return queryString;
- }
-
- public void setQueryString( String queryString ) {
- this.queryString = queryString;
- }
-
- @Override
- public String getRemoteUser() {
- return null;
- }
-
- @Override
- public boolean isUserInRole( String s ) {
- return false;
- }
-
- @Override
- public Principal getUserPrincipal() {
- return null;
- }
-
- @Override
- public String getRequestedSessionId() {
- return null;
- }
-
- @Override
- public String getRequestURI() {
- return null;
- }
-
- @Override
- public StringBuffer getRequestURL() {
- return null;
- }
-
- @Override
- public String getServletPath() {
- return null;
- }
-
- @Override
- public HttpSession getSession( boolean b ) {
- return null;
- }
-
- @Override
- public HttpSession getSession() {
- return null;
- }
-
- @Override
- public String changeSessionId() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public boolean isRequestedSessionIdValid() {
- return false;
- }
-
- @Override
- public boolean isRequestedSessionIdFromCookie() {
- return false;
- }
-
- @Override
- public boolean isRequestedSessionIdFromURL() {
- return false;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public boolean isRequestedSessionIdFromUrl() {
- return false;
- }
-
- @Override
- public boolean authenticate( HttpServletResponse httpServletResponse ) throws IOException, ServletException {
- return false;
- }
-
- @Override
- public void login( String s, String s1 ) throws ServletException {
- }
-
- @Override
- public void logout() throws ServletException {
- }
-
- @Override
- public Collection<Part> getParts() throws IOException, ServletException {
- return null;
- }
-
- @Override
- public Part getPart( String s ) throws IOException, ServletException {
- return null;
- }
-
- @Override
- public <T extends HttpUpgradeHandler> T upgrade( Class<T> aClass ) throws IOException, ServletException {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public Object getAttribute( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getAttributeNames() {
- return null;
- }
-
- @Override
- public String getCharacterEncoding() {
- return characterEncoding;
- }
-
- @Override
- public void setCharacterEncoding( String characterEncoding ) throws UnsupportedEncodingException {
- this.characterEncoding = characterEncoding;
- }
-
- @Override
- public int getContentLength() {
- return 0;
- }
-
- @Override
- public long getContentLengthLong() {
- return 0;
- }
-
- @Override
- public String getContentType() {
- return contentType;
- }
-
- public void setContentType( String contentType ) {
- this.contentType = contentType;
- }
-
- @Override
- public ServletInputStream getInputStream() throws IOException {
- return inputStream;
- }
-
- public void setInputStream( ServletInputStream intputStream ) {
- this.inputStream = intputStream;
- }
-
- @Override
- public String getParameter( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getParameterNames() {
- return null;
- }
-
- @Override
- public String[] getParameterValues( String s ) {
- return new String[ 0 ];
- }
-
- @Override
- public Map<String, String[]> getParameterMap() {
- return null;
- }
-
- @Override
- public String getProtocol() {
- return null;
- }
-
- @Override
- public String getScheme() {
- return null;
- }
-
- @Override
- public String getServerName() {
- return null;
- }
-
- @Override
- public int getServerPort() {
- return 0;
- }
-
- @Override
- public BufferedReader getReader() throws IOException {
- return null;
- }
-
- @Override
- public String getRemoteAddr() {
- return null;
- }
-
- @Override
- public String getRemoteHost() {
- return null;
- }
-
- @Override
- public void setAttribute( String s, Object o ) {
- }
-
- @Override
- public void removeAttribute( String s ) {
- }
-
- @Override
- public Locale getLocale() {
- return null;
- }
-
- @Override
- public Enumeration<Locale> getLocales() {
- return null;
- }
-
- @Override
- public boolean isSecure() {
- return false;
- }
-
- @Override
- public RequestDispatcher getRequestDispatcher( String s ) {
- return null;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public String getRealPath( String s ) {
- return null;
- }
-
- @Override
- public int getRemotePort() {
- return 0;
- }
-
- @Override
- public String getLocalName() {
- return null;
- }
-
- @Override
- public String getLocalAddr() {
- return null;
- }
-
- @Override
- public int getLocalPort() {
- return 0;
- }
-
- @Override
- public ServletContext getServletContext() {
- return null;
- }
-
- @Override
- public AsyncContext startAsync() throws IllegalStateException {
- return null;
- }
-
- @Override
- public AsyncContext startAsync( ServletRequest servletRequest, ServletResponse servletResponse ) throws IllegalStateException {
- return null;
- }
-
- @Override
- public boolean isAsyncStarted() {
- return false;
- }
-
- @Override
- public boolean isAsyncSupported() {
- return false;
- }
-
- @Override
- public AsyncContext getAsyncContext() {
- return null;
- }
-
- @Override
- public DispatcherType getDispatcherType() {
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletResponse.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletResponse.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletResponse.java
deleted file mode 100644
index 9d20d17..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockHttpServletResponse.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import javax.servlet.ServletOutputStream;
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.util.Collection;
-import java.util.Locale;
-
-public class MockHttpServletResponse implements HttpServletResponse {
-
- @Override
- public void addCookie( Cookie cookie ) {
- }
-
- @Override
- public boolean containsHeader( String s ) {
- return false;
- }
-
- @Override
- public String encodeURL( String s ) {
- return null;
- }
-
- @Override
- public String encodeRedirectURL( String s ) {
- return null;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public String encodeUrl( String s ) {
- return null;
- }
-
- @Override
- public String encodeRedirectUrl( String s ) {
- return null;
- }
-
- @Override
- public void sendError( int i, String s ) throws IOException {
- }
-
- @Override
- public void sendError( int i ) throws IOException {
- }
-
- @Override
- public void sendRedirect( String s ) throws IOException {
- }
-
- @Override
- public void setDateHeader( String s, long l ) {
- }
-
- @Override
- public void addDateHeader( String s, long l ) {
- }
-
- @Override
- public void setHeader( String s, String s1 ) {
- }
-
- @Override
- public void addHeader( String s, String s1 ) {
- }
-
- @Override
- public void setIntHeader( String s, int i ) {
- }
-
- @Override
- public void addIntHeader( String s, int i ) {
- }
-
- @Override
- public void setStatus( int i ) {
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public void setStatus( int i, String s ) {
- }
-
- @Override
- public int getStatus() {
- return 0;
- }
-
- @Override
- public String getHeader( String s ) {
- return null;
- }
-
- @Override
- public Collection<String> getHeaders( String s ) {
- return null;
- }
-
- @Override
- public Collection<String> getHeaderNames() {
- return null;
- }
-
- @Override
- public String getCharacterEncoding() {
- return null;
- }
-
- @Override
- public String getContentType() {
- return null;
- }
-
- @Override
- public ServletOutputStream getOutputStream() throws IOException {
- return null;
- }
-
- @Override
- public PrintWriter getWriter() throws IOException {
- return null;
- }
-
- @Override
- public void setCharacterEncoding( String s ) {
- }
-
- @Override
- public void setContentLength( int i ) {
- }
-
- @Override
- public void setContentLengthLong( long l ) {
- }
-
- @Override
- public void setContentType( String s ) {
- }
-
- @Override
- public void setBufferSize( int i ) {
- }
-
- @Override
- public int getBufferSize() {
- return 0;
- }
-
- @Override
- public void flushBuffer() throws IOException {
- }
-
- @Override
- public void resetBuffer() {
- }
-
- @Override
- public boolean isCommitted() {
- return false;
- }
-
- @Override
- public void reset() {
- }
-
- @Override
- public void setLocale( Locale locale ) {
- }
-
- @Override
- public Locale getLocale() {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockInteraction.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockInteraction.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockInteraction.java
deleted file mode 100644
index 1e30d38..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockInteraction.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-public class MockInteraction {
-
- private MockResponseProvider response = new MockResponseProvider();
- private MockRequestMatcher request = new MockRequestMatcher( response );
-
- public MockRequestMatcher expect() {
- return request;
- }
-
- public MockResponseProvider respond() {
- return response;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockRequestMatcher.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockRequestMatcher.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockRequestMatcher.java
deleted file mode 100644
index e107e6f..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockRequestMatcher.java
+++ /dev/null
@@ -1,330 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.ArrayUtils;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.utils.URLEncodedUtils;
-import org.apache.http.message.BasicNameValuePair;
-import org.hamcrest.Matcher;
-import org.hamcrest.Matchers;
-
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletRequest;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import static org.hamcrest.CoreMatchers.*;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.equalToIgnoringCase;
-import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
-import static org.xmlmatchers.transform.XmlConverters.the;
-import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
-
-public class MockRequestMatcher {
-
- private static final Charset UTF8 = Charset.forName( "UTF-8" );
-
- private String from;
- private MockResponseProvider response;
- private Set<String> methods = null;
- private String pathInfo = null;
- private String requestURL = null;
- Map<String,Matcher> headers = null;
- Set<Cookie> cookies = null;
- private Map<String,Object> attributes = null;
- private Map<String,String> queryParams = null;
- private String contentType = null;
- private String characterEncoding = null;
- private Integer contentLength = null;
- private byte[] entity = null;
- private Map<String,String[]> formParams = null;
-
- public MockRequestMatcher( MockResponseProvider response ) {
- this.response = response;
- }
-
- public MockResponseProvider respond() {
- return response;
- }
-
- public MockRequestMatcher from( String from ) {
- this.from = from;
- return this;
- }
-
- public MockRequestMatcher method( String... methods ) {
- if( this.methods == null ) {
- this.methods = new HashSet<>();
- }
- if( methods != null ) {
- for( String method: methods ) {
- this.methods.add( method );
- }
- }
- return this;
- }
-
- public MockRequestMatcher pathInfo( String pathInfo ) {
- this.pathInfo = pathInfo;
- return this;
- }
-
- public MockRequestMatcher requestUrl( String requestUrl ) {
- this.requestURL = requestUrl;
- return this;
- }
-
- public MockRequestMatcher header( String name, String value ) {
- if( headers == null ) {
- headers = new HashMap<>();
- }
- headers.put( name, Matchers.is(value) );
- return this;
- }
-
- public MockRequestMatcher header( String name, Matcher matcher ) {
- if( headers == null ) {
- headers = new HashMap<>();
- }
- headers.put( name, matcher );
- return this;
- }
-
- public MockRequestMatcher cookie( Cookie cookie ) {
- if( cookies == null ) {
- cookies = new HashSet<>();
- }
- cookies.add( cookie );
- return this;
- }
-
- public MockRequestMatcher attribute( String name, Object value ) {
- if( this.attributes == null ) {
- this.attributes = new HashMap<>();
- }
- attributes.put( name, value );
- return this;
- }
-
- public MockRequestMatcher queryParam( String name, String value ) {
- if( this.queryParams == null ) {
- this.queryParams = new HashMap<>();
- }
- queryParams.put( name, value );
- return this;
- }
-
- public MockRequestMatcher formParam( String name, String... values ) {
- if( entity != null ) {
- throw new IllegalStateException( "Entity already specified." );
- }
- if( formParams == null ) {
- formParams = new HashMap<>();
- }
- String[] currentValues = formParams.get( name );
- if( currentValues == null ) {
- currentValues = values;
- } else if ( values != null ) {
- currentValues = ArrayUtils.addAll( currentValues, values );
- }
- formParams.put( name, currentValues );
- return this;
- }
-
- public MockRequestMatcher content( String string, Charset charset ) {
- content( string.getBytes( charset ) );
- return this;
- }
-
- public MockRequestMatcher content( byte[] entity ) {
- if( formParams != null ) {
- throw new IllegalStateException( "Form params already specified." );
- }
- this.entity = entity;
- return this;
- }
-
- public MockRequestMatcher content( URL url ) throws IOException {
- content( url.openStream() );
- return this;
- }
-
- public MockRequestMatcher content( InputStream stream ) throws IOException {
- content( IOUtils.toByteArray( stream ) );
- return this;
- }
-
- public MockRequestMatcher contentType( String contentType ) {
- this.contentType = contentType;
- return this;
- }
-
- public MockRequestMatcher contentLength( int length ) {
- this.contentLength = length;
- return this;
- }
-
- public MockRequestMatcher characterEncoding( String charset ) {
- this.characterEncoding = charset;
- return this;
- }
-
- public void match( HttpServletRequest request ) throws IOException {
- if( methods != null ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " is not using one of the expected HTTP methods",
- methods, hasItem( request.getMethod() ) );
- }
- if( pathInfo != null ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected pathInfo",
- request.getPathInfo(), is( pathInfo ) );
- }
- if( requestURL != null ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected requestURL",
- request.getRequestURL().toString(), is( requestURL ) );
- }
- if( headers != null ) {
- for( Entry<String, Matcher> entry : headers.entrySet() ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected value for header " + entry.getKey(),
- request.getHeader( entry.getKey() ), entry.getValue() );
- }
- }
- if( cookies != null ) {
- List<Cookie> requestCookies = Arrays.asList( request.getCookies() );
- for( Cookie cookie: cookies ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected cookie " + cookie,
- requestCookies, hasItem( cookie ) );
- }
- }
- if( contentType != null ) {
- String[] requestContentType = request.getContentType().split(";",2);
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected content type",
- requestContentType[ 0 ], is( contentType ) );
- }
- if( characterEncoding != null ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected character encoding",
- request.getCharacterEncoding(), equalToIgnoringCase( characterEncoding ) );
- }
- if( contentLength != null ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " does not have the expected content length",
- request.getContentLength(), is( contentLength ) );
- }
- if( attributes != null ) {
- for( String name: attributes.keySet() ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " is missing attribute '" + name + "'",
- request.getAttribute( name ), notNullValue() );
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " has wrong value for attribute '" + name + "'",
- request.getAttribute( name ), is( request.getAttribute( name ) ) );
- }
- }
- // Note: Cannot use any of the expect.getParameter*() methods because they will read the
- // body and we don't want that to happen.
- if( queryParams != null ) {
- String queryString = request.getQueryString();
- List<NameValuePair> requestParams = parseQueryString( queryString == null ? "" : queryString );
- for( Entry<String, String> entry : queryParams.entrySet() ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " query string " + queryString + " is missing parameter '" + entry.getKey() + "'",
- requestParams, hasItem( new BasicNameValuePair(entry.getKey(), entry.getValue())) );
- }
- }
- if( formParams != null ) {
- String paramString = IOUtils.toString( request.getInputStream(), request.getCharacterEncoding() );
- List<NameValuePair> requestParams = parseQueryString( paramString == null ? "" : paramString );
- for( Entry<String, String[]> entry : formParams.entrySet() ) {
- String[] expectedValues = entry.getValue();
- for( String expectedValue : expectedValues ) {
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " form params " + paramString + " is missing a value " + expectedValue + " for parameter '" + entry.getKey() + "'",
- requestParams, hasItem( new BasicNameValuePair(entry.getKey(), expectedValue ) ));
- }
- }
- }
- if( entity != null ) {
- if( contentType != null && contentType.endsWith( "/xml" ) ) {
- String expectEncoding = characterEncoding;
- String expect = new String( entity, ( expectEncoding == null ? UTF8.name() : expectEncoding ) );
- String actualEncoding = request.getCharacterEncoding();
- String actual = IOUtils.toString( request.getInputStream(), actualEncoding == null ? UTF8.name() : actualEncoding );
- assertThat( the( actual ), isEquivalentTo( the( expect ) ) );
- } else if ( contentType != null && contentType.endsWith( "/json" ) ) {
- String expectEncoding = characterEncoding;
- String expect = new String( entity, ( expectEncoding == null ? UTF8.name() : expectEncoding ) );
- String actualEncoding = request.getCharacterEncoding();
- String actual = IOUtils.toString( request.getInputStream(), actualEncoding == null ? UTF8.name() : actualEncoding );
-// System.out.println( "EXPECT=" + expect );
-// System.out.println( "ACTUAL=" + actual );
- assertThat( actual, sameJSONAs( expect ) );
- } else if( characterEncoding == null || request.getCharacterEncoding() == null ) {
- byte[] bytes = IOUtils.toByteArray( request.getInputStream() );
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " content does not match the expected content",
- bytes, is( entity ) );
- } else {
- String expect = new String( entity, characterEncoding );
- String actual = IOUtils.toString( request.getInputStream(), request.getCharacterEncoding() );
- assertThat(
- "Request " + request.getMethod() + " " + request.getRequestURL() +
- " content does not match the expected content",
- actual, is( expect ) );
- }
- }
- }
-
- public String toString() {
- return "from=" + from + ", pathInfo=" + pathInfo;
- }
-
- private static List<NameValuePair> parseQueryString( String queryString ) {
- return URLEncodedUtils.parse(queryString, Charset.defaultCharset());
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockResponseProvider.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockResponseProvider.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockResponseProvider.java
deleted file mode 100644
index b1b1178..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockResponseProvider.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import org.apache.commons.io.IOUtils;
-
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URL;
-import java.nio.charset.Charset;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-public class MockResponseProvider {
-
- Integer errorCode = null;
- String errorMsg = null;
- Integer statusCode = null;
- String redirectUrl = null;
- Map<String,String> headers = null;
- Set<Cookie> cookies = null;
- byte[] entity = null;
- String contentType = null;
- String characterEncoding = null;
- Integer contentLength = null;
-
- public MockResponseProvider status( int statusCode ) {
- this.statusCode = statusCode;
- return this;
- }
-
- public MockResponseProvider error( int code, String message ) {
- errorCode = code;
- errorMsg = message;
- return this;
- }
-
- public MockResponseProvider redirect( String location ) {
- redirectUrl = location;
- return this;
- }
-
- public MockResponseProvider header( String name, String value ) {
- if( headers == null ) {
- headers = new HashMap<>();
- }
- headers.put( name, value );
- return this;
- }
-
- public MockResponseProvider cookie( Cookie cookie ) {
- if( cookies == null ) {
- cookies = new HashSet<>();
- }
- cookies.add( cookie );
- return this;
- }
-
- public MockResponseProvider content( byte[] entity ) {
- this.entity = entity;
- return this;
- }
-
- public MockResponseProvider content( String string, Charset charset ) {
- this.entity = string.getBytes( charset );
- return this;
- }
-
- public MockResponseProvider content( URL url ) throws IOException {
- content( url.openStream() );
- return this;
- }
-
- public MockResponseProvider content( InputStream stream ) throws IOException {
- content( IOUtils.toByteArray( stream ) );
- return this;
- }
-
- public MockResponseProvider contentType( String contentType ) {
- this.contentType = contentType;
- return this;
- }
-
- public MockResponseProvider contentLength( int contentLength ) {
- this.contentLength = contentLength;
- return this;
- }
-
- public MockResponseProvider characterEncoding( String charset ) {
- this.characterEncoding = charset;
- return this;
- }
-
- public void apply( HttpServletResponse response ) throws IOException {
- if( statusCode != null ) {
- response.setStatus( statusCode );
- } else {
- response.setStatus( HttpServletResponse.SC_OK );
- }
- if( errorCode != null ) {
- if( errorMsg != null ) {
- response.sendError( errorCode, errorMsg );
- } else {
- response.sendError( errorCode );
- }
- }
- if( redirectUrl != null ) {
- response.sendRedirect( redirectUrl );
- }
- if( headers != null ) {
- for( Entry<String, String> entry : headers.entrySet() ) {
- response.addHeader( entry.getKey(), entry.getValue() );
- }
- }
- if( cookies != null ) {
- for( Cookie cookie: cookies ) {
- response.addCookie( cookie );
- }
- }
- if( contentType != null ) {
- response.setContentType( contentType );
- }
- if( characterEncoding != null ) {
- response.setCharacterEncoding( characterEncoding );
- }
- if( contentLength != null ) {
- response.setContentLength( contentLength );
- }
- response.flushBuffer();
- if( entity != null ) {
- response.getOutputStream().write( entity );
- //KNOX-685: response.getOutputStream().flush();
- response.getOutputStream().close();
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
deleted file mode 100644
index 5d95ce6..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServer.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.Servlet;
-import java.util.LinkedList;
-import java.util.Queue;
-
-/**
- * An embedded Jetty server with a single servlet deployed on "/*".
- * It is used by populating a queue of "interactions".
- * Each interaction is an expected request and a resulting response.
- * These interactions are added to a queue in a fluent API style.
- * So in most of the tests like GatewayBasicFuncTest.testBasicJsonUseCase you will see calls like
- * driver.getMock( "WEBHDFS" ).expect()....respond()...;
- * This adds a single interaction to the mock server which is returned via the driver.getMock( "WEBHDFS" ) above.
- * Any number of interactions may be added.
- * When the request comes in it will check the request against the expected request.
- * If it matches return the response otherwise it will return a 500 error.
- * Typically at the end of a test you should check to make sure the interaction queue is consumed by calling isEmpty().
- * The reset() method can be used to ensure everything is cleaned up so that the mock server can be reused between tests.
- * The whole idea was modeled after how the REST testing framework REST-assured and aims to be a server side equivalent.
- */
-public class MockServer {
-
- private Logger log = LoggerFactory.getLogger( this.getClass() );
-
- private String name;
- private Server jetty;
-
- private Queue<MockInteraction> interactions = new LinkedList<MockInteraction>();
-
- public MockServer( String name ) {
- this.name = name;
- }
-
- public MockServer( String name, boolean start ) throws Exception {
- this.name = name;
- if( start ) {
- start();
- }
- }
-
- public String getName() {
- return name;
- }
-
- public void start() throws Exception {
- Handler context = createHandler();
- jetty = new Server(0);
- jetty.setHandler( context );
- jetty.start();
- log.info( "Mock server started on port " + getPort() );
- }
-
- public void stop() throws Exception {
- jetty.stop();
- jetty.join();
- }
-
- private ServletContextHandler createHandler() {
- Servlet servlet = new MockServlet( getName(), interactions );
- ServletHolder holder = new ServletHolder( servlet );
- ServletContextHandler context = new ServletContextHandler( ServletContextHandler.SESSIONS );
- context.setContextPath( "/" );
- context.addServlet( holder, "/*" );
- return context;
- }
-
- public int getPort() {
- return jetty.getURI().getPort();
- }
-
- public MockRequestMatcher expect() {
- MockInteraction interaction = new MockInteraction();
- interactions.add( interaction );
- return interaction.expect();
- }
-
- public MockResponseProvider respond() {
- MockInteraction interaction = new MockInteraction();
- interactions.add( interaction );
- return interaction.respond();
- }
-
- public int getCount() {
- return interactions.size();
- }
-
- public boolean isEmpty() {
- return interactions.isEmpty();
- }
-
- public void reset() {
- interactions.clear();
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServlet.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServlet.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServlet.java
deleted file mode 100644
index ca4692c..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServlet.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.util.Queue;
-
-import org.apache.log4j.Logger;
-
-import static org.junit.Assert.fail;
-
-public class MockServlet extends HttpServlet {
-
- private static final Logger LOG = Logger.getLogger(MockServlet.class.getName());
-
- public String name;
- public Queue<MockInteraction> interactions;
-
- public MockServlet( String name, Queue<MockInteraction> interactions ) {
- this.name = name;
- this.interactions = interactions;
- }
-
- @Override
- protected void service( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {
- LOG.debug( "service: request=" + request.getMethod() + " " + request.getRequestURL() + "?" + request.getQueryString() );
- try {
- if( interactions.isEmpty() ) {
- fail( "Mock servlet " + name + " received a request but the expected interaction queue is empty." );
- }
- MockInteraction interaction = interactions.remove();
- interaction.expect().match( request );
- interaction.respond().apply( response );
- LOG.debug( "service: response=" + response.getStatus() );
- } catch( AssertionError e ) {
- LOG.debug( "service: exception=" + e.getMessage() );
- e.printStackTrace(); // I18N not required.
- throw new ServletException( e );
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletContext.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletContext.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletContext.java
deleted file mode 100644
index 0df84c3..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletContext.java
+++ /dev/null
@@ -1,293 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterRegistration;
-import javax.servlet.RequestDispatcher;
-import javax.servlet.Servlet;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRegistration;
-import javax.servlet.SessionCookieConfig;
-import javax.servlet.SessionTrackingMode;
-import javax.servlet.descriptor.JspConfigDescriptor;
-import java.io.InputStream;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.Enumeration;
-import java.util.EventListener;
-import java.util.Map;
-import java.util.Set;
-
-public class MockServletContext implements ServletContext {
-
- @Override
- public String getContextPath() {
- return null;
- }
-
- @Override
- public ServletContext getContext( String s ) {
- return null;
- }
-
- @Override
- public int getMajorVersion() {
- return 0;
- }
-
- @Override
- public int getMinorVersion() {
- return 0;
- }
-
- @Override
- public int getEffectiveMajorVersion() {
- return 0;
- }
-
- @Override
- public int getEffectiveMinorVersion() {
- return 0;
- }
-
- @Override
- public String getMimeType( String s ) {
- return null;
- }
-
- @Override
- public Set<String> getResourcePaths( String s ) {
- return null;
- }
-
- @Override
- public URL getResource( String s ) throws MalformedURLException {
- return null;
- }
-
- @Override
- public InputStream getResourceAsStream( String s ) {
- return null;
- }
-
- @Override
- public RequestDispatcher getRequestDispatcher( String s ) {
- return null;
- }
-
- @Override
- public RequestDispatcher getNamedDispatcher( String s ) {
- return null;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public Servlet getServlet( String s ) throws ServletException {
- return null;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public Enumeration<Servlet> getServlets() {
- return null;
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public Enumeration<String> getServletNames() {
- return null;
- }
-
- @Override
- public void log( String s ) {
- }
-
- @Override
- @SuppressWarnings("deprecation")
- public void log( Exception e, String s ) {
- }
-
- @Override
- public void log( String s, Throwable throwable ) {
- }
-
- @Override
- public String getRealPath( String s ) {
- return null;
- }
-
- @Override
- public String getServerInfo() {
- return null;
- }
-
- @Override
- public String getInitParameter( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getInitParameterNames() {
- return null;
- }
-
- @Override
- public boolean setInitParameter( String s, String s1 ) {
- return false;
- }
-
- @Override
- public Object getAttribute( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getAttributeNames() {
- return null;
- }
-
- @Override
- public void setAttribute( String s, Object o ) {
- }
-
- @Override
- public void removeAttribute( String s ) {
- }
-
- @Override
- public String getServletContextName() {
- return null;
- }
-
- @Override
- public ServletRegistration.Dynamic addServlet( String s, String s1 ) {
- return null;
- }
-
- @Override
- public ServletRegistration.Dynamic addServlet( String s, Servlet servlet ) {
- return null;
- }
-
- @Override
- public ServletRegistration.Dynamic addServlet( String s, Class<? extends Servlet> aClass ) {
- return null;
- }
-
- @Override
- public <T extends Servlet> T createServlet( Class<T> tClass ) throws ServletException {
- return null;
- }
-
- @Override
- public ServletRegistration getServletRegistration( String s ) {
- return null;
- }
-
- @Override
- public Map<String, ? extends ServletRegistration> getServletRegistrations() {
- return null;
- }
-
- @Override
- public FilterRegistration.Dynamic addFilter( String s, String s1 ) {
- return null;
- }
-
- @Override
- public FilterRegistration.Dynamic addFilter( String s, Filter filter ) {
- return null;
- }
-
- @Override
- public FilterRegistration.Dynamic addFilter( String s, Class<? extends Filter> aClass ) {
- return null;
- }
-
- @Override
- public <T extends Filter> T createFilter( Class<T> tClass ) throws ServletException {
- return null;
- }
-
- @Override
- public FilterRegistration getFilterRegistration( String s ) {
- return null;
- }
-
- @Override
- public Map<String, ? extends FilterRegistration> getFilterRegistrations() {
- return null;
- }
-
- @Override
- public SessionCookieConfig getSessionCookieConfig() {
- return null;
- }
-
- @Override
- public void setSessionTrackingModes( Set<SessionTrackingMode> sessionTrackingModes ) {
- }
-
- @Override
- public Set<SessionTrackingMode> getDefaultSessionTrackingModes() {
- return null;
- }
-
- @Override
- public Set<SessionTrackingMode> getEffectiveSessionTrackingModes() {
- return null;
- }
-
- @Override
- public void addListener( String s ) {
- }
-
- @Override
- public <T extends EventListener> void addListener( T t ) {
- }
-
- @Override
- public void addListener( Class<? extends EventListener> aClass ) {
- }
-
- @Override
- public <T extends EventListener> T createListener( Class<T> tClass ) throws ServletException {
- return null;
- }
-
- @Override
- public JspConfigDescriptor getJspConfigDescriptor() {
- return null;
- }
-
- @Override
- public ClassLoader getClassLoader() {
- return null;
- }
-
- @Override
- public void declareRoles( String... strings ) {
- }
-
- @Override
- public String getVirtualServerName() {
- throw new UnsupportedOperationException();
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletInputStream.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletInputStream.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletInputStream.java
deleted file mode 100644
index 227dc1c..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockServletInputStream.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.servlet.ReadListener;
-import javax.servlet.ServletInputStream;
-
-public class MockServletInputStream extends ServletInputStream {
-
- private InputStream stream;
-
- public MockServletInputStream( InputStream stream ) {
- this.stream = stream;
- }
-
- @Override
- public int read() throws IOException {
- return stream.read();
- }
-
- @Override
- public boolean isFinished() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public boolean isReady() {
- throw new UnsupportedOperationException();
- }
-
- @Override
- public void setReadListener( ReadListener readListener ) {
- throw new UnsupportedOperationException();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/Console.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/Console.java b/gateway-test-utils/src/main/java/org/apache/knox/test/Console.java
new file mode 100644
index 0000000..0965748
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/Console.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+public class Console {
+
+ PrintStream oldOut, newOut;
+ PrintStream oldErr, newErr;
+ ByteArrayOutputStream newOutBuf, newErrBuf;
+
+ public void capture() {
+ oldErr = System.err;
+ newErrBuf = new ByteArrayOutputStream();
+ newErr = new PrintStream( newErrBuf );
+
+ oldOut = System.out; // I18N not required.
+ newOutBuf = new ByteArrayOutputStream();
+ newOut = new PrintStream( newOutBuf );
+
+ System.setErr( newErr );
+ System.setOut( newOut );
+ }
+
+ public byte[] getOut() {
+ return newOutBuf.toByteArray();
+ }
+
+ public byte[] getErr() {
+ return newErrBuf.toByteArray();
+ }
+
+ public void release() {
+ System.setErr( oldErr );
+ System.setOut( oldOut );
+ newErr.close();
+ newOut.close();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java b/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
new file mode 100644
index 0000000..5437ce1
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/TestUtils.java
@@ -0,0 +1,216 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.StringWriter;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.util.Properties;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.log4j.Logger;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.runtime.RuntimeConstants;
+import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
+import org.eclipse.jetty.http.HttpTester;
+import org.eclipse.jetty.servlet.ServletTester;
+
+public class TestUtils {
+
+ private static Logger LOG = Logger.getLogger(TestUtils.class);
+
+ public static final long SHORT_TIMEOUT = 1000L;
+ public static final long MEDIUM_TIMEOUT = 20 * 1000L;
+ public static final long LONG_TIMEOUT = 60 * 1000L;
+
+ public static String getResourceName( Class clazz, String name ) {
+ name = clazz.getName().replaceAll( "\\.", "/" ) + "/" + name;
+ return name;
+ }
+
+ public static URL getResourceUrl( Class clazz, String name ) throws FileNotFoundException {
+ name = getResourceName( clazz, name );
+ URL url = ClassLoader.getSystemResource( name );
+ if( url == null ) {
+ throw new FileNotFoundException( name );
+ }
+ return url;
+ }
+
+ public static URL getResourceUrl( String name ) throws FileNotFoundException {
+ URL url = ClassLoader.getSystemResource( name );
+ if( url == null ) {
+ throw new FileNotFoundException( name );
+ }
+ return url;
+ }
+
+ public static InputStream getResourceStream( String name ) throws IOException {
+ URL url = ClassLoader.getSystemResource( name );
+ InputStream stream = url.openStream();
+ return stream;
+ }
+
+ public static InputStream getResourceStream( Class clazz, String name ) throws IOException {
+ URL url = getResourceUrl( clazz, name );
+ InputStream stream = url.openStream();
+ return stream;
+ }
+
+ public static Reader getResourceReader( String name, String charset ) throws IOException {
+ return new InputStreamReader( getResourceStream( name ), charset );
+ }
+
+ public static Reader getResourceReader( Class clazz, String name, String charset ) throws IOException {
+ return new InputStreamReader( getResourceStream( clazz, name ), charset );
+ }
+
+ public static String getResourceString( Class clazz, String name, String charset ) throws IOException {
+ return IOUtils.toString( getResourceReader( clazz, name, charset ) );
+ }
+
+ public static File createTempDir( String prefix ) throws IOException {
+ File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+ File tempDir = new File( targetDir, prefix + UUID.randomUUID() );
+ FileUtils.forceMkdir( tempDir );
+ return tempDir;
+ }
+
+ public static void LOG_ENTER() {
+ StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
+ System.out.flush();
+ System.out.println( String.format( "Running %s#%s", caller.getClassName(), caller.getMethodName() ) );
+ System.out.flush();
+ }
+
+ public static void LOG_EXIT() {
+ StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
+ System.out.flush();
+ System.out.println( String.format( "Exiting %s#%s", caller.getClassName(), caller.getMethodName() ) );
+ System.out.flush();
+ }
+
+ public static void awaitPortOpen( InetSocketAddress address, int timeout, int delay ) throws InterruptedException {
+ long maxTime = System.currentTimeMillis() + timeout;
+ do {
+ try {
+ Socket socket = new Socket();
+ socket.connect( address, delay );
+ socket.close();
+ return;
+ } catch ( IOException e ) {
+ //e.printStackTrace();
+ }
+ } while( System.currentTimeMillis() < maxTime );
+ throw new IllegalStateException( "Timed out " + timeout + " waiting for port " + address );
+ }
+
+ public static void awaitNon404HttpStatus( URL url, int timeout, int delay ) throws InterruptedException {
+ long maxTime = System.currentTimeMillis() + timeout;
+ do {
+ Thread.sleep( delay );
+ HttpURLConnection conn = null;
+ try {
+ conn = (HttpURLConnection)url.openConnection();
+ conn.getInputStream().close();
+ return;
+ } catch ( IOException e ) {
+ //e.printStackTrace();
+ try {
+ if( conn != null && conn.getResponseCode() != 404 ) {
+ return;
+ }
+ } catch ( IOException ee ) {
+ //ee.printStackTrace();
+ }
+ }
+ } while( System.currentTimeMillis() < maxTime );
+ throw new IllegalStateException( "Timed out " + timeout + " waiting for URL " + url );
+ }
+
+ public static String merge( String resource, Properties properties ) {
+ ClasspathResourceLoader loader = new ClasspathResourceLoader();
+ loader.getResourceStream( resource );
+
+ VelocityEngine engine = new VelocityEngine();
+ Properties config = new Properties();
+ config.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
+ config.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
+ config.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
+ engine.init( config );
+
+ VelocityContext context = new VelocityContext( properties );
+ Template template = engine.getTemplate( resource );
+ StringWriter writer = new StringWriter();
+ template.merge( context, writer );
+ return writer.toString();
+ }
+
+ public static String merge( Class base, String resource, Properties properties ) {
+ String baseResource = base.getName().replaceAll( "\\.", "/" );
+ String fullResource = baseResource + "/" + resource;
+ return merge( fullResource, properties );
+ }
+
+ public static int findFreePort() throws IOException {
+ ServerSocket socket = new ServerSocket(0);
+ int port = socket.getLocalPort();
+ socket.close();
+ return port;
+ }
+
+ public static void waitUntilNextSecond() {
+ long before = System.currentTimeMillis();
+ long wait;
+ while( ( wait = ( 1000 - ( System.currentTimeMillis() - before ) ) ) > 0 ) {
+ try {
+ Thread.sleep( wait );
+ } catch( InterruptedException e ) {
+ // Ignore.
+ }
+ }
+ }
+
+ public static HttpTester.Response execute( ServletTester server, HttpTester.Request request ) throws Exception {
+ LOG.debug( "execute: request=" + request );
+ ByteBuffer requestBuffer = request.generate();
+ LOG.trace( "execute: requestBuffer=[" + new String(requestBuffer.array(),0,requestBuffer.limit()) + "]" );
+ ByteBuffer responseBuffer = server.getResponses( requestBuffer, 30, TimeUnit.SECONDS );
+ HttpTester.Response response = HttpTester.parseResponse( responseBuffer );
+ LOG.trace( "execute: responseBuffer=[" + new String(responseBuffer.array(),0,responseBuffer.limit()) + "]" );
+ LOG.debug( "execute: reponse=" + response );
+ return response;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/FastTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/FastTests.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/FastTests.java
new file mode 100644
index 0000000..2360c17
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/FastTests.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface FastTests {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/ManualTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/ManualTests.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/ManualTests.java
new file mode 100644
index 0000000..0065357
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/ManualTests.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface ManualTests {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/MediumTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/MediumTests.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/MediumTests.java
new file mode 100644
index 0000000..f5d354b
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/MediumTests.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface MediumTests {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/ReleaseTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/ReleaseTest.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/ReleaseTest.java
new file mode 100644
index 0000000..6e2279e
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/ReleaseTest.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface ReleaseTest {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/SlowTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/SlowTests.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/SlowTests.java
new file mode 100644
index 0000000..3f0b50c
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/SlowTests.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface SlowTests {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/UnitTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/UnitTests.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/UnitTests.java
new file mode 100644
index 0000000..0d91e00
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/UnitTests.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface UnitTests {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/category/VerifyTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/category/VerifyTest.java b/gateway-test-utils/src/main/java/org/apache/knox/test/category/VerifyTest.java
new file mode 100644
index 0000000..825c08f
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/category/VerifyTest.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.category;
+
+public interface VerifyTest {
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/log/CollectAppender.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/log/CollectAppender.java b/gateway-test-utils/src/main/java/org/apache/knox/test/log/CollectAppender.java
new file mode 100644
index 0000000..3ab0c93
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/log/CollectAppender.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.log;
+
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.spi.LoggingEvent;
+
+public class CollectAppender extends AppenderSkeleton {
+
+ public CollectAppender() {
+ super();
+ }
+
+ public static BlockingQueue<LoggingEvent> queue = new LinkedBlockingQueue<LoggingEvent>();
+ public static boolean closed = false;
+
+ @Override
+ protected void append( LoggingEvent event ) {
+ event.getProperties();
+ queue.add( event );
+ }
+
+ @Override
+ public void close() {
+ closed = true;
+ }
+
+ @Override
+ public boolean requiresLayout() {
+ return false;
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpAppender.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpAppender.java b/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpAppender.java
new file mode 100644
index 0000000..80a7fce
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpAppender.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.log;
+
+import org.apache.log4j.Appender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.spi.ErrorHandler;
+import org.apache.log4j.spi.Filter;
+import org.apache.log4j.spi.LoggingEvent;
+
+import java.util.Enumeration;
+
+public class NoOpAppender implements Appender {
+
+ public static Enumeration<Appender> setUp() {
+ Enumeration<Appender> appenders = (Enumeration<Appender>)Logger.getRootLogger().getAllAppenders();
+ Logger.getRootLogger().removeAllAppenders();
+ Logger.getRootLogger().addAppender( new NoOpAppender() );
+ return appenders;
+ }
+
+ public static void tearDown( Enumeration<Appender> appenders ) {
+ if( appenders != null ) {
+ while( appenders.hasMoreElements() ) {
+ Logger.getRootLogger().addAppender( appenders.nextElement() );
+ }
+ }
+ }
+
+ @Override
+ public void addFilter( Filter newFilter ) {
+ }
+
+ @Override
+ public Filter getFilter() {
+ return null;
+ }
+
+ @Override
+ public void clearFilters() {
+ }
+
+ @Override
+ public void close() {
+ }
+
+ @Override
+ public void doAppend( LoggingEvent event ) {
+ }
+
+ @Override
+ public String getName() {
+ return this.getClass().getName();
+ }
+
+ @Override
+ public void setErrorHandler( ErrorHandler errorHandler ) {
+ }
+
+ @Override
+ public ErrorHandler getErrorHandler() {
+ return null;
+ }
+
+ @Override
+ public void setLayout( Layout layout ) {
+ }
+
+ @Override
+ public Layout getLayout() {
+ return null;
+ }
+
+ @Override
+ public void setName( String name ) {
+ }
+
+ @Override
+ public boolean requiresLayout() {
+ return false;
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpLogger.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpLogger.java b/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpLogger.java
new file mode 100644
index 0000000..2c6763f
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/log/NoOpLogger.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.log;
+
+import org.eclipse.jetty.util.log.Logger;
+
+public class NoOpLogger implements Logger {
+
+ @Override
+ public String getName() {
+ return "";
+ }
+
+ @Override
+ public void warn( String msg, Object... args ) {
+ }
+
+ @Override
+ public void warn( Throwable thrown ) {
+ }
+
+ @Override
+ public void warn( String msg, Throwable thrown ) {
+ }
+
+ @Override
+ public void info( String msg, Object... args ) {
+ }
+
+ @Override
+ public void info( Throwable thrown ) {
+ }
+
+ @Override
+ public void info( String msg, Throwable thrown ) {
+ }
+
+ @Override
+ public boolean isDebugEnabled() {
+ return false;
+ }
+
+ @Override
+ public void setDebugEnabled( boolean enabled ) {
+ }
+
+ @Override
+ public void debug( String msg, Object... args ) {
+ }
+
+ @Override
+ public void debug( String msg, long arg ) {
+ }
+
+ @Override
+ public void debug( Throwable thrown ) {
+ }
+
+ @Override
+ public void debug( String msg, Throwable thrown ) {
+ }
+
+ @Override
+ public Logger getLogger( String name ) {
+ return this;
+ }
+
+ @Override
+ public void ignore( Throwable ignored ) {
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockFilterConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockFilterConfig.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockFilterConfig.java
new file mode 100644
index 0000000..39a3625
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockFilterConfig.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import java.util.Enumeration;
+
+public class MockFilterConfig implements FilterConfig {
+
+ @Override
+ public String getFilterName() {
+ return null;
+ }
+
+ @Override
+ public ServletContext getServletContext() {
+ return null;
+ }
+
+ @Override
+ public String getInitParameter( String s ) {
+ return null;
+ }
+
+ @Override
+ public Enumeration<String> getInitParameterNames() {
+ return null;
+ }
+
+}
[09/25] knox git commit: KNOX-1039 - Added admin APIs for managing
shared provider configurations and descriptors (Phil Zampino via Sandeep
More)
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
index 9ddc469..c5acb50 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
@@ -23,8 +23,10 @@ import java.io.IOException;
import java.io.StringReader;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.ws.rs.core.MediaType;
@@ -32,6 +34,9 @@ import javax.ws.rs.core.MediaType;
import io.restassured.http.ContentType;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
+import io.restassured.response.ResponseBody;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.services.DefaultGatewayServices;
import org.apache.hadoop.gateway.services.GatewayServices;
@@ -55,6 +60,7 @@ import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import static io.restassured.RestAssured.given;
+import static junit.framework.TestCase.assertTrue;
import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
@@ -64,6 +70,8 @@ import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.xml.HasXPath.hasXPath;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
@@ -108,6 +116,12 @@ public class GatewayAdminTopologyFuncTest {
File deployDir = new File( testConfig.getGatewayDeploymentDir() );
deployDir.mkdirs();
+ File providerConfigDir = new File(testConfig.getGatewayConfDir(), "shared-providers");
+ providerConfigDir.mkdirs();
+
+ File descriptorsDir = new File(testConfig.getGatewayConfDir(), "descriptors");
+ descriptorsDir.mkdirs();
+
File descriptor = new File( topoDir, "admin.xml" );
FileOutputStream stream = new FileOutputStream( descriptor );
createKnoxTopology().toStream( stream );
@@ -229,6 +243,84 @@ public class GatewayAdminTopologyFuncTest {
return xml;
}
+ private static XMLTag createProviderConfiguration() {
+ XMLTag xml = XMLDoc.newDocument( true )
+ .addRoot( "gateway" )
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "authentication" )
+ .addTag( "name" ).addText( "ShiroProvider" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm" )
+ .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+ .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+ .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+ .addTag( "value" ).addText( "simple" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "urls./**" )
+ .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag( "role" ).addText( "authorization" )
+ .addTag( "name" ).addText( "AclsAuthz" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag("param")
+ .addTag("name").addText("knox.acl")
+ .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag( "role" ).addText( "identity-assertion" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "name" ).addText( "Default" ).gotoParent()
+ .gotoRoot();
+ // System.out.println( "GATEWAY=" + xml.toString() );
+ return xml;
+ }
+
+
+ private static String createDescriptor(String clusterName) {
+ return createDescriptor(clusterName, null);
+ }
+
+
+ private static String createDescriptor(String clusterName, String providerConfigRef) {
+ StringBuilder sb = new StringBuilder();
+ if (providerConfigRef == null) {
+ providerConfigRef = "sandbox-providers";
+ }
+
+ sb.append("{\n");
+ sb.append(" \"discovery-type\":\"AMBARI\",\n");
+ sb.append(" \"discovery-address\":\"http://c6401.ambari.apache.org:8080\",\n");
+ sb.append(" \"discovery-user\":\"ambariuser\",\n");
+ sb.append(" \"discovery-pwd-alias\":\"ambari.discovery.password\",\n");
+ sb.append(" \"provider-config-ref\":\"");
+ sb.append(providerConfigRef);
+ sb.append("\",\n");
+ sb.append(" \"cluster\":\"");
+ sb.append(clusterName);
+ sb.append("\",\n");
+ sb.append(" \"services\":[\n");
+ sb.append(" {\"name\":\"NAMENODE\"},\n");
+ sb.append(" {\"name\":\"JOBTRACKER\"},\n");
+ sb.append(" {\"name\":\"WEBHDFS\"},\n");
+ sb.append(" {\"name\":\"WEBHCAT\"},\n");
+ sb.append(" {\"name\":\"OOZIE\"},\n");
+ sb.append(" {\"name\":\"WEBHBASE\"},\n");
+ sb.append(" {\"name\":\"HIVE\"},\n");
+ sb.append(" {\"name\":\"RESOURCEMANAGER\"},\n");
+ sb.append(" {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n");
+ sb.append(" ]\n");
+ sb.append("}\n");
+
+ return sb.toString();
+ }
+
+
//@Test
public void waitForManualTesting() throws IOException {
System.in.read();
@@ -795,4 +887,498 @@ public class GatewayAdminTopologyFuncTest {
LOG_EXIT();
}
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testProviderConfigurationCollection() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
+
+ final File sharedProvidersDir = new File(config.getGatewayConfDir(), "shared-providers");
+ final List<String> configNames = Arrays.asList("sandbox-providers", "custom-providers");
+ final List<String> configFileNames = Arrays.asList(configNames.get(0) + ".xml", configNames.get(1) + ".xml");
+
+ // Request a listing of all the provider configs with an INCORRECT Accept header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
+ .when().get(serviceUrl);
+
+ // Request a listing of all the provider configs (with the CORRECT Accept header)
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertTrue("Expected no items since the shared-providers dir is empty.", items.isEmpty());
+
+ // Manually write a file to the shared-providers directory
+ File providerConfig = new File(sharedProvidersDir, configFileNames.get(0));
+ FileOutputStream stream = new FileOutputStream(providerConfig);
+ createProviderConfiguration().toStream(stream);
+ stream.close();
+
+ // Request a listing of all the provider configs
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ items = responseBody.path("items");
+ assertEquals("Expected items to include the new file in the shared-providers dir.", 1, items.size());
+ assertEquals(configFileNames.get(0), responseBody.path("items[0].name"));
+ String href1 = responseBody.path("items[0].href");
+
+ // Manually write another file to the shared-providers directory
+ File anotherProviderConfig = new File(sharedProvidersDir, configFileNames.get(1));
+ stream = new FileOutputStream(anotherProviderConfig);
+ createProviderConfiguration().toStream(stream);
+ stream.close();
+
+ // Request a listing of all the provider configs
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ items = responseBody.path("items");
+ assertEquals(2, items.size());
+ String pcOne = responseBody.path("items[0].name");
+ String pcTwo = responseBody.path("items[1].name");
+ assertTrue(configFileNames.contains(pcOne));
+ assertTrue(configFileNames.contains(pcTwo));
+
+ // Request a specific provider configuration with an INCORRECT Accept header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
+ .when().get(href1).body();
+
+ // Request a specific provider configuration (with the CORRECT Accept header)
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_XML)
+ .when().get(href1).body();
+ String sandboxProvidersConfigContent = responseBody.asString();
+
+ // Parse the result, to make sure it's at least valid XML
+ XmlUtils.readXml(new InputSource(new StringReader(sandboxProvidersConfigContent)));
+
+ providerConfig.delete();
+ anotherProviderConfig.delete();
+
+ // Request a specific provider configuration, which does NOT exist
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_FOUND)
+ .when().get(serviceUrl + "/not-a-real-provider-config");
+
+ LOG_EXIT();
+ }
+
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testPutProviderConfiguration() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
+
+ final String newProviderConfigName = "new-provider-config";
+ final String newProviderConfigFileName = newProviderConfigName + ".xml";
+
+ XMLTag newProviderConfigXML = createProviderConfiguration();
+
+ // Attempt to PUT a provider config with an INCORRECT Content-type header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Content-type", MediaType.APPLICATION_JSON)
+ .body(newProviderConfigXML.toBytes("utf-8"))
+ .then()
+ .statusCode(HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE)
+ .when().put(serviceUrl + "/" + newProviderConfigName);
+
+ // Attempt to PUT a provider config with the CORRECT Content-type header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Content-type", MediaType.APPLICATION_XML)
+ .body(newProviderConfigXML.toBytes("utf-8"))
+ .then()
+ .statusCode(HttpStatus.SC_CREATED)
+ .when().put(serviceUrl + "/" + newProviderConfigName);
+
+ // Verify that the provider configuration was written to the expected location
+ File newProviderConfigFile =
+ new File(new File(config.getGatewayConfDir(), "shared-providers"), newProviderConfigFileName);
+ assertTrue(newProviderConfigFile.exists());
+
+ // Request a listing of all the provider configs to further verify the PUT
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertEquals(1, items.size());
+ assertEquals(newProviderConfigFileName, responseBody.path("items[0].name"));
+ String href = responseBody.path("items[0].href");
+
+ // Get the new provider config content
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_XML)
+ .when().get(href).body();
+ String configContent = responseBody.asString();
+
+ // Parse the result, to make sure it's at least valid XML
+ XmlUtils.readXml(new InputSource(new StringReader(configContent)));
+
+ // Manually delete the provider config
+ newProviderConfigFile.delete();
+
+ LOG_EXIT();
+ }
+
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testDeleteProviderConfiguration() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
+
+ final File sharedProvidersDir = new File(config.getGatewayConfDir(), "shared-providers");
+
+ // Manually add two provider config files to the shared-providers directory
+ File providerConfigOneFile = new File(sharedProvidersDir, "deleteme-one-config.xml");
+ FileOutputStream stream = new FileOutputStream(providerConfigOneFile);
+ createProviderConfiguration().toStream(stream);
+ stream.close();
+ assertTrue(providerConfigOneFile.exists());
+
+ File providerConfigTwoFile = new File(sharedProvidersDir, "deleteme-two-config.xml");
+ stream = new FileOutputStream(providerConfigTwoFile);
+ createProviderConfiguration().toStream(stream);
+ stream.close();
+ assertTrue(providerConfigTwoFile.exists());
+
+ // Request a listing of all the provider configs
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertEquals(2, items.size());
+ String name1 = responseBody.path("items[0].name");
+ String href1 = responseBody.path("items[0].href");
+ String name2 = responseBody.path("items[1].name");
+ String href2 = responseBody.path("items[1].href");
+
+ // Delete one of the provider configs
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().delete(href1).body();
+ String deletedMsg = responseBody.path("deleted");
+ assertEquals("provider config " + FilenameUtils.getBaseName(name1), deletedMsg);
+ assertFalse((new File(sharedProvidersDir, name1).exists()));
+
+ assertTrue((new File(sharedProvidersDir, name2).exists()));
+ // Delete the other provider config
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().delete(href2).body();
+ deletedMsg = responseBody.path("deleted");
+ assertEquals("provider config " + FilenameUtils.getBaseName(name2), deletedMsg);
+ assertFalse((new File(sharedProvidersDir, name2).exists()));
+
+ // Attempt to delete a provider config that does not exist
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .when().delete(serviceUrl + "/does-not-exist");
+
+ LOG_EXIT();
+ }
+
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testDescriptorCollection() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
+
+ final File descriptorsDir = new File(config.getGatewayConfDir(), "descriptors");
+ final List<String> clusterNames = Arrays.asList("clusterOne", "clusterTwo");
+ final List<String> descriptorNames = Arrays.asList("test-descriptor-one", "test-descriptor-two");
+ final List<String> descriptorFileNames = Arrays.asList(descriptorNames.get(0) + ".json",
+ descriptorNames.get(1) + ".json");
+
+ // Request a listing of all the descriptors with an INCORRECT Accept header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
+ .when().get(serviceUrl);
+
+ // Request a listing of all the descriptors (with the CORRECT Accept header)
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertTrue("Expected no items since the descriptors dir is empty.", items.isEmpty());
+
+ // Manually write a file to the descriptors directory
+ File descriptorOneFile = new File(descriptorsDir, descriptorFileNames.get(0));
+ FileUtils.write(descriptorOneFile, createDescriptor(clusterNames.get(0)));
+
+ // Request a listing of all the descriptors
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ items = responseBody.path("items");
+ assertEquals("Expected items to include the new file in the shared-providers dir.", 1, items.size());
+ assertEquals(descriptorFileNames.get(0), responseBody.path("items[0].name"));
+ String href1 = responseBody.path("items[0].href");
+
+ // Manually write another file to the descriptors directory
+ File descriptorTwoFile = new File(descriptorsDir, descriptorFileNames.get(1));
+ FileUtils.write(descriptorTwoFile, createDescriptor(clusterNames.get(1)));
+
+ // Request a listing of all the descriptors
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ items = responseBody.path("items");
+ assertEquals(2, items.size());
+ String descOne = responseBody.path("items[0].name");
+ String descTwo = responseBody.path("items[1].name");
+ assertTrue(descriptorFileNames.contains(descOne));
+ assertTrue(descriptorFileNames.contains(descTwo));
+
+ // Request a specific descriptor with an INCORRECT Accept header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
+ .when().get(href1).body();
+
+ // Request a specific descriptor (with the CORRECT Accept header)
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(href1).body();
+ String cluster = responseBody.path("cluster");
+ assertEquals(cluster, clusterNames.get(0));
+
+ // Request a specific descriptor, which does NOT exist
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_NOT_FOUND)
+ .when().get(serviceUrl + "/not-a-real-descriptor").body();
+
+ descriptorOneFile.delete();
+ descriptorTwoFile.delete();
+
+ LOG_EXIT();
+ }
+
+
+ /**
+ * Verifies the Admin API descriptor PUT endpoint: a request with an XML
+ * Content-type header must be rejected (415 Unsupported Media Type), while
+ * a JSON request must create the descriptor (201 Created), persist it as a
+ * .json file under the gateway's descriptors directory, and make it
+ * available via subsequent listing and detail GET requests.
+ */
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testPutDescriptor() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
+
+ final String clusterName = "test-cluster";
+ final String newDescriptorName = "new-descriptor";
+ // The service is expected to persist the descriptor with a .json extension
+ final String newDescriptorFileName = newDescriptorName + ".json";
+
+ String newDescriptorJSON = createDescriptor(clusterName);
+
+ // Attempt to PUT a descriptor with an INCORRECT Content-type header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Content-type", MediaType.APPLICATION_XML)
+ .body(newDescriptorJSON.getBytes("utf-8"))
+ .then()
+ .statusCode(HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE)
+ .when().put(serviceUrl + "/" + newDescriptorName);
+
+ // Attempt to PUT a descriptor with the CORRECT Content-type header
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Content-type", MediaType.APPLICATION_JSON)
+ .body(newDescriptorJSON.getBytes("utf-8"))
+ .then()
+ .statusCode(HttpStatus.SC_CREATED)
+ .when().put(serviceUrl + "/" + newDescriptorName);
+
+ // Verify that the descriptor was written to the expected location
+ // (the "descriptors" subdirectory of the gateway conf dir)
+ File newDescriptorFile =
+ new File(new File(config.getGatewayConfDir(), "descriptors"), newDescriptorFileName);
+ assertTrue(newDescriptorFile.exists());
+
+ // Request a listing of all the descriptors to verify the PUT
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertEquals(1, items.size());
+ assertEquals(newDescriptorFileName, responseBody.path("items[0].name"));
+ String href = responseBody.path("items[0].href");
+
+ // Get the new descriptor content via the href returned in the listing,
+ // and confirm it carries the cluster name it was created with
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(href).body();
+ String cluster = responseBody.path("cluster");
+ assertEquals(clusterName, cluster);
+
+ // Manually delete the descriptor file to clean up, since this test does
+ // not exercise the DELETE endpoint
+ newDescriptorFile.delete();
+
+ LOG_EXIT();
+ }
+
+
+ /**
+ * Verifies the Admin API descriptor DELETE endpoint: descriptor files added
+ * directly to the descriptors directory can be deleted through the API, the
+ * response reports the deleted descriptor's base name, the backing file is
+ * removed from disk, and a DELETE for a non-existent descriptor still
+ * responds with 200 OK.
+ */
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testDeleteDescriptor() throws Exception {
+ LOG_ENTER();
+
+ final String username = "admin";
+ final String password = "admin-password";
+ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
+
+ final File descriptorsDir = new File(config.getGatewayConfDir(), "descriptors");
+
+ // Manually add two descriptor files to the descriptors directory
+ File descriptorOneFile = new File(descriptorsDir, "deleteme-one.json");
+ FileUtils.writeStringToFile(descriptorOneFile, createDescriptor("clusterOne"));
+ assertTrue(descriptorOneFile.exists());
+
+ File descriptorTwoFile = new File(descriptorsDir, "deleteme-two.json");
+ FileUtils.writeStringToFile(descriptorTwoFile, createDescriptor("clusterTwo"));
+ assertTrue(descriptorTwoFile.exists());
+
+ // Request a listing of all the descriptors, capturing each item's name
+ // and href for the subsequent DELETE requests
+ ResponseBody responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(serviceUrl).body();
+ List<String> items = responseBody.path("items");
+ assertEquals(2, items.size());
+ String name1 = responseBody.path("items[0].name");
+ String href1 = responseBody.path("items[0].href");
+ String name2 = responseBody.path("items[1].name");
+ String href2 = responseBody.path("items[1].href");
+
+ // Delete one of the descriptors
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().delete(href1).body();
+ // The response identifies the deleted descriptor by its base name
+ // (i.e., the file name without the extension)
+ String deletedMsg = responseBody.path("deleted");
+ assertEquals("descriptor " + FilenameUtils.getBaseName(name1), deletedMsg);
+ assertFalse((new File(descriptorsDir, name1).exists()));
+
+ // The other descriptor's file must be untouched by the first DELETE
+ assertTrue((new File(descriptorsDir, name2).exists()));
+ // Delete the other descriptor
+ responseBody = given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().delete(href2).body();
+ deletedMsg = responseBody.path("deleted");
+ assertEquals("descriptor " + FilenameUtils.getBaseName(name2), deletedMsg);
+ assertFalse((new File(descriptorsDir, name2).exists()));
+
+ // Attempt to delete a descriptor that does not exist; the API is expected
+ // to respond 200 rather than an error status
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .when().delete(serviceUrl + "/does-not-exist");
+
+ LOG_EXIT();
+ }
+
+
}
[25/25] knox git commit: KNOX-998 - Some more refactoring
Posted by mo...@apache.org.
KNOX-998 - Some more refactoring
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/1451428f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/1451428f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/1451428f
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 1451428f7cca88758e8163276cb5f6a33c1f812d
Parents: 46109ad
Author: Sandeep More <mo...@apache.org>
Authored: Thu Nov 2 14:47:28 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Nov 2 14:47:28 2017 -0400
----------------------------------------------------------------------
.../gateway/i18n/messages/MessagesTest.java | 4 +-
.../gateway/i18n/resources/ResourcesTest.java | 4 +-
...yAssertionHttpServletRequestWrapperTest.java | 8 +-
.../function/UsernameFunctionProcessorTest.java | 8 +-
.../impl/HostmapFunctionProcessorTest.java | 2 +-
.../impl/ServiceRegistryFunctionsTest.java | 8 +-
.../UrlRewriteServletContextListenerTest.java | 4 +-
.../api/UrlRewriteServletFilterTest.java | 8 +-
.../impl/FrontendFunctionProcessorTest.java | 8 +-
.../rewrite/impl/json/JsonFilterReaderTest.java | 2 +-
.../rewrite/impl/xml/XmlFilterReaderTest.java | 2 +-
.../apache/knox/gateway/AuditLoggingTest.java | 2 +-
.../apache/knox/gateway/GatewayFilterTest.java | 4 +-
.../knox/gateway/GatewayGlobalConfigTest.java | 2 +-
.../org/apache/knox/gateway/TempletonDemo.java | 4 +-
.../config/impl/GatewayConfigImplTest.java | 2 +-
.../gateway/deploy/DeploymentFactoryTest.java | 2 +-
.../xml/XmlGatewayDescriptorExporterTest.java | 2 +-
.../knox/gateway/jetty/SslSocketTest.java | 4 +-
.../knox/gateway/mock/MockConsoleFactory.java | 2 +-
.../services/security/CryptoServiceTest.java | 4 +-
.../topology/DefaultTopologyServiceTest.java | 2 +-
.../builder/PropertyTopologyBuilderTest.java | 2 +-
.../validation/TopologyValidatorTest.java | 2 +-
.../topology/xml/TopologyRulesModuleTest.java | 2 +-
.../knox/gateway/websockets/BadUrlTest.java | 2 +-
.../gateway/websockets/WebsocketEchoTest.java | 2 +-
.../WebsocketMultipleConnectionTest.java | 2 +-
.../src/test/resources/log4j.properties | 2 +-
.../knox/gateway/hbase/HBaseDispatchTest.java | 6 +-
.../gateway/dispatch/DefaultDispatchTest.java | 6 +-
.../security/principal/PrincipalMapperTest.java | 4 +-
.../hostmap/FileBasedHostMapperTest.java | 2 +-
.../security/impl/CMFKeystoreServiceTest.java | 4 +-
.../security/impl/CMFMasterServiceTest.java | 4 +-
.../apache/knox/gateway/GatewayTestDriver.java | 2 +-
.../apache/knox/gateway/SecureClusterTest.java | 4 +-
.../java/org/apache/knox/gateway/ShellTest.java | 4 +-
.../java/org/apache/hadoop/test/Console.java | 57 --
.../java/org/apache/hadoop/test/TestUtils.java | 216 -----
.../apache/hadoop/test/category/FastTests.java | 21 -
.../hadoop/test/category/ManualTests.java | 21 -
.../hadoop/test/category/MediumTests.java | 21 -
.../hadoop/test/category/ReleaseTest.java | 21 -
.../apache/hadoop/test/category/SlowTests.java | 21 -
.../apache/hadoop/test/category/UnitTests.java | 21 -
.../apache/hadoop/test/category/VerifyTest.java | 21 -
.../apache/hadoop/test/log/CollectAppender.java | 51 --
.../apache/hadoop/test/log/NoOpAppender.java | 98 ---
.../org/apache/hadoop/test/log/NoOpLogger.java | 87 --
.../hadoop/test/mock/MockFilterConfig.java | 46 -
.../test/mock/MockHttpServletRequest.java | 410 ---------
.../test/mock/MockHttpServletResponse.java | 195 -----
.../hadoop/test/mock/MockInteraction.java | 33 -
.../hadoop/test/mock/MockRequestMatcher.java | 330 --------
.../hadoop/test/mock/MockResponseProvider.java | 158 ----
.../org/apache/hadoop/test/mock/MockServer.java | 119 ---
.../apache/hadoop/test/mock/MockServlet.java | 61 --
.../hadoop/test/mock/MockServletContext.java | 293 -------
.../test/mock/MockServletInputStream.java | 54 --
.../main/java/org/apache/knox/test/Console.java | 57 ++
.../java/org/apache/knox/test/TestUtils.java | 216 +++++
.../apache/knox/test/category/FastTests.java | 21 +
.../apache/knox/test/category/ManualTests.java | 21 +
.../apache/knox/test/category/MediumTests.java | 21 +
.../apache/knox/test/category/ReleaseTest.java | 21 +
.../apache/knox/test/category/SlowTests.java | 21 +
.../apache/knox/test/category/UnitTests.java | 21 +
.../apache/knox/test/category/VerifyTest.java | 21 +
.../apache/knox/test/log/CollectAppender.java | 51 ++
.../org/apache/knox/test/log/NoOpAppender.java | 98 +++
.../org/apache/knox/test/log/NoOpLogger.java | 87 ++
.../apache/knox/test/mock/MockFilterConfig.java | 46 +
.../knox/test/mock/MockHttpServletRequest.java | 410 +++++++++
.../knox/test/mock/MockHttpServletResponse.java | 195 +++++
.../apache/knox/test/mock/MockInteraction.java | 33 +
.../knox/test/mock/MockRequestMatcher.java | 330 ++++++++
.../knox/test/mock/MockResponseProvider.java | 157 ++++
.../org/apache/knox/test/mock/MockServer.java | 119 +++
.../org/apache/knox/test/mock/MockServlet.java | 61 ++
.../knox/test/mock/MockServletContext.java | 293 +++++++
.../knox/test/mock/MockServletInputStream.java | 54 ++
.../gateway/AmbariServiceDefinitionTest.java | 8 +-
.../knox/gateway/GatewayAdminFuncTest.java | 2 +-
.../gateway/GatewayAdminTopologyFuncTest.java | 8 +-
.../apache/knox/gateway/GatewayAppFuncTest.java | 10 +-
.../knox/gateway/GatewayBasicFuncTest.java | 12 +-
.../knox/gateway/GatewayDeployFuncTest.java | 10 +-
.../knox/gateway/GatewayHealthFuncTest.java | 2 +-
.../GatewayLdapDynamicGroupFuncTest.java | 6 +-
.../knox/gateway/GatewayLdapGroupFuncTest.java | 7 +-
.../gateway/GatewayLdapPosixGroupFuncTest.java | 11 +-
.../gateway/GatewayLocalServiceFuncTest.java | 10 +-
.../knox/gateway/GatewayMultiFuncTest.java | 11 +-
.../GatewayPortMappingDisableFeatureTest.java | 10 +-
.../gateway/GatewayPortMappingFailTest.java | 10 +-
.../gateway/GatewayPortMappingFuncTest.java | 10 +-
.../knox/gateway/GatewaySampleFuncTest.java | 8 +-
.../apache/knox/gateway/GatewaySslFuncTest.java | 14 +-
.../apache/knox/gateway/Knox242FuncTest.java | 8 +-
.../gateway/KnoxCliLdapFuncTestNegative.java | 8 +-
.../gateway/KnoxCliLdapFuncTestPositive.java | 10 +-
.../apache/knox/gateway/KnoxCliSysBindTest.java | 10 +-
.../gateway/OozieServiceDefinitionTest.java | 8 +-
.../apache/knox/gateway/WebHdfsHaFuncTest.java | 10 +-
.../deploy/DeploymentFactoryFuncTest.java | 8 +-
.../knox/gateway/audit/AuditLayoutTest.java | 2 +-
.../knox/gateway/audit/AuditServiceTest.java | 2 +-
.../audit/StoreAndForwardAppenderTest.java | 2 +-
.../src/test/resources/audit-log4j.properties | 2 +-
.../gateway/util/urltemplate/ExpanderTest.java | 4 +-
.../gateway/util/urltemplate/MatcherTest.java | 4 +-
.../util/urltemplate/MatcherTest.java.orig | 839 -------------------
.../gateway/util/urltemplate/ParserTest.java | 4 +-
.../gateway/util/urltemplate/RewriterTest.java | 4 +-
.../gateway/util/urltemplate/SegmentTest.java | 4 +-
.../gateway/util/urltemplate/TemplateTest.java | 4 +-
pom.xml | 8 +-
118 files changed, 2536 insertions(+), 3401 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/messages/MessagesTest.java
----------------------------------------------------------------------
diff --git a/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/messages/MessagesTest.java b/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/messages/MessagesTest.java
index d53e99d..8a9c42e 100644
--- a/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/messages/MessagesTest.java
+++ b/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/messages/MessagesTest.java
@@ -20,8 +20,8 @@ package org.apache.knox.gateway.i18n.messages;
import org.apache.knox.gateway.i18n.messages.loggers.test.TestMessageLogger;
import org.apache.knox.gateway.i18n.messages.loggers.test.TestMessageLoggerFactory;
import org.apache.knox.gateway.i18n.messages.loggers.test.TestMessageRecord;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/resources/ResourcesTest.java
----------------------------------------------------------------------
diff --git a/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/resources/ResourcesTest.java b/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/resources/ResourcesTest.java
index a876b89..d54f379 100644
--- a/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/resources/ResourcesTest.java
+++ b/gateway-i18n/src/test/java/org/apache/knox/gateway/i18n/resources/ResourcesTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.i18n.resources;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
index 745fbdd..eaa2245 100644
--- a/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/filter/IdentityAssertionHttpServletRequestWrapperTest.java
@@ -20,10 +20,10 @@ package org.apache.knox.gateway.identityasserter.filter;
import org.apache.commons.io.IOUtils;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.identityasserter.common.filter.IdentityAsserterHttpServletRequestWrapper;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
-import org.apache.hadoop.test.mock.MockHttpServletRequest;
-import org.apache.hadoop.test.mock.MockServletInputStream;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
+import org.apache.knox.test.mock.MockHttpServletRequest;
+import org.apache.knox.test.mock.MockServletInputStream;
import org.junit.Test;
import org.junit.After;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/function/UsernameFunctionProcessorTest.java b/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
index 0f9d67c..556443f 100644
--- a/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
+++ b/gateway-provider-identity-assertion-common/src/test/java/org/apache/knox/gateway/identityasserter/function/UsernameFunctionProcessorTest.java
@@ -24,10 +24,10 @@ import org.apache.knox.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
import org.apache.knox.gateway.identityasserter.common.function.UsernameFunctionProcessor;
import org.apache.knox.gateway.security.PrimaryPrincipal;
import org.apache.knox.gateway.util.urltemplate.Parser;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpLogger;
-import org.apache.hadoop.test.mock.MockInteraction;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpLogger;
+import org.apache.knox.test.mock.MockInteraction;
+import org.apache.knox.test.mock.MockServlet;
import org.apache.http.auth.BasicUserPrincipal;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletHolder;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite-func-hostmap-static/src/test/java/org/apache/knox/gateway/hostmap/impl/HostmapFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-hostmap-static/src/test/java/org/apache/knox/gateway/hostmap/impl/HostmapFunctionProcessorTest.java b/gateway-provider-rewrite-func-hostmap-static/src/test/java/org/apache/knox/gateway/hostmap/impl/HostmapFunctionProcessorTest.java
index 92ec957..c373dc0 100644
--- a/gateway-provider-rewrite-func-hostmap-static/src/test/java/org/apache/knox/gateway/hostmap/impl/HostmapFunctionProcessorTest.java
+++ b/gateway-provider-rewrite-func-hostmap-static/src/test/java/org/apache/knox/gateway/hostmap/impl/HostmapFunctionProcessorTest.java
@@ -31,7 +31,7 @@ import org.apache.knox.gateway.services.hostmap.HostMapperService;
import org.apache.knox.gateway.util.urltemplate.Parser;
import org.apache.knox.gateway.util.urltemplate.Resolver;
import org.apache.knox.gateway.util.urltemplate.Template;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.easymock.EasyMock;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/knox/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/knox/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/knox/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
index 9f19a4f..47525bb 100644
--- a/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/knox/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
+++ b/gateway-provider-rewrite-func-service-registry/src/test/java/org/apache/knox/gateway/svcregfunc/impl/ServiceRegistryFunctionsTest.java
@@ -23,10 +23,10 @@ import org.apache.knox.gateway.filter.rewrite.api.UrlRewriteServletFilter;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.registry.ServiceRegistry;
import org.apache.knox.gateway.util.urltemplate.Parser;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpLogger;
-import org.apache.hadoop.test.mock.MockInteraction;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpLogger;
+import org.apache.knox.test.mock.MockInteraction;
+import org.apache.knox.test.mock.MockServlet;
import org.apache.http.auth.BasicUserPrincipal;
import org.easymock.EasyMock;
import org.eclipse.jetty.servlet.FilterHolder;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletContextListenerTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletContextListenerTest.java b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletContextListenerTest.java
index 1d0d72d..db65e35 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletContextListenerTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletContextListenerTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.filter.rewrite.api;
-import org.apache.hadoop.test.mock.MockInteraction;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.mock.MockInteraction;
+import org.apache.knox.test.mock.MockServlet;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.http.HttpTester;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletFilterTest.java b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletFilterTest.java
index 484786e..1df1c20 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletFilterTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/api/UrlRewriteServletFilterTest.java
@@ -20,10 +20,10 @@ package org.apache.knox.gateway.filter.rewrite.api;
import com.jayway.jsonassert.JsonAssert;
import org.apache.knox.gateway.filter.AbstractGatewayFilter;
import org.apache.knox.gateway.util.urltemplate.Parser;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
-import org.apache.hadoop.test.mock.MockInteraction;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
+import org.apache.knox.test.mock.MockInteraction;
+import org.apache.knox.test.mock.MockServlet;
import org.apache.log4j.Appender;
import org.apache.log4j.Logger;
import org.eclipse.jetty.http.HttpHeader;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/FrontendFunctionProcessorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/FrontendFunctionProcessorTest.java b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/FrontendFunctionProcessorTest.java
index d40edc9..a946bd2 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/FrontendFunctionProcessorTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/FrontendFunctionProcessorTest.java
@@ -27,10 +27,10 @@ import org.apache.knox.gateway.filter.rewrite.spi.UrlRewriteFunctionProcessor;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.registry.ServiceRegistry;
import org.apache.knox.gateway.util.urltemplate.Parser;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpLogger;
-import org.apache.hadoop.test.mock.MockInteraction;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpLogger;
+import org.apache.knox.test.mock.MockInteraction;
+import org.apache.knox.test.mock.MockServlet;
import org.apache.http.auth.BasicUserPrincipal;
import org.easymock.EasyMock;
import org.eclipse.jetty.http.HttpTester;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
index 1378fef..ad55aeb 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/json/JsonFilterReaderTest.java
@@ -26,7 +26,7 @@ import org.apache.knox.gateway.filter.rewrite.api.UrlRewriteFilterDescriptor;
import org.apache.knox.gateway.filter.rewrite.api.UrlRewriteFilterDetectDescriptor;
import org.apache.knox.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
import org.apache.knox.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.junit.Test;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/xml/XmlFilterReaderTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/xml/XmlFilterReaderTest.java b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/xml/XmlFilterReaderTest.java
index 7ac4626..5aa5cce 100644
--- a/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/xml/XmlFilterReaderTest.java
+++ b/gateway-provider-rewrite/src/test/java/org/apache/knox/gateway/filter/rewrite/impl/xml/XmlFilterReaderTest.java
@@ -37,7 +37,7 @@ import org.apache.knox.gateway.filter.rewrite.ext.UrlRewriteControlDescriptor;
import org.apache.knox.gateway.filter.rewrite.ext.UrlRewriteMatchDescriptor;
import org.apache.knox.gateway.filter.rewrite.ext.UrlRewriteMatchDescriptorExt;
import org.apache.knox.gateway.filter.rewrite.spi.UrlRewriteActionDescriptorBase;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/AuditLoggingTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/AuditLoggingTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/AuditLoggingTest.java
index 82890c4..03ee0d7 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/AuditLoggingTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/AuditLoggingTest.java
@@ -50,7 +50,7 @@ import org.apache.knox.gateway.audit.log4j.correlation.Log4jCorrelationService;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.dispatch.DefaultDispatch;
import org.apache.knox.gateway.i18n.resources.ResourcesFactory;
-import org.apache.hadoop.test.log.CollectAppender;
+import org.apache.knox.test.log.CollectAppender;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.log4j.spi.LoggingEvent;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
index 2fe1f1a..b7f787a 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
@@ -21,8 +21,8 @@ import org.apache.knox.gateway.audit.api.AuditServiceFactory;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.filter.AbstractGatewayFilter;
import org.apache.knox.gateway.topology.Topology;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/GatewayGlobalConfigTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayGlobalConfigTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayGlobalConfigTest.java
index 4cfdb8e..1acf9d6 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayGlobalConfigTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayGlobalConfigTest.java
@@ -19,7 +19,7 @@ package org.apache.knox.gateway;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.config.impl.GatewayConfigImpl;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.hamcrest.Matchers;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/TempletonDemo.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/TempletonDemo.java b/gateway-server/src/test/java/org/apache/knox/gateway/TempletonDemo.java
index 66321ea..fd32abc 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/TempletonDemo.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/TempletonDemo.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway;
-import org.apache.hadoop.test.category.ManualTests;
-import org.apache.hadoop.test.category.SlowTests;
+import org.apache.knox.test.category.ManualTests;
+import org.apache.knox.test.category.SlowTests;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
index bae67e6..06da13d 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/config/impl/GatewayConfigImplTest.java
@@ -1,6 +1,6 @@
package org.apache.knox.gateway.config.impl;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryTest.java
index 2973f40..7cea065 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryTest.java
@@ -27,7 +27,7 @@ import org.apache.knox.gateway.topology.Application;
import org.apache.knox.gateway.topology.Service;
import org.apache.knox.gateway.topology.Topology;
import org.apache.knox.gateway.util.XmlUtils;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.junit.Test;
import org.w3c.dom.Document;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/descriptor/xml/XmlGatewayDescriptorExporterTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/descriptor/xml/XmlGatewayDescriptorExporterTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/descriptor/xml/XmlGatewayDescriptorExporterTest.java
index 5624a10..d4469c9 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/descriptor/xml/XmlGatewayDescriptorExporterTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/descriptor/xml/XmlGatewayDescriptorExporterTest.java
@@ -20,7 +20,7 @@ package org.apache.knox.gateway.descriptor.xml;
import org.apache.knox.gateway.descriptor.GatewayDescriptor;
import org.apache.knox.gateway.descriptor.GatewayDescriptorFactory;
import org.apache.knox.gateway.util.XmlUtils;
-import org.apache.hadoop.test.Console;
+import org.apache.knox.test.Console;
import org.junit.Test;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/jetty/SslSocketTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/jetty/SslSocketTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/jetty/SslSocketTest.java
index 5aafc20..f65c220 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/jetty/SslSocketTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/jetty/SslSocketTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.jetty;
-import org.apache.hadoop.test.category.MediumTests;
-import org.apache.hadoop.test.category.ManualTests;
+import org.apache.knox.test.category.MediumTests;
+import org.apache.knox.test.category.ManualTests;
import org.apache.http.HttpVersion;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.params.BasicHttpParams;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/mock/MockConsoleFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/mock/MockConsoleFactory.java b/gateway-server/src/test/java/org/apache/knox/gateway/mock/MockConsoleFactory.java
index cb15db1..590cca0 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/mock/MockConsoleFactory.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/mock/MockConsoleFactory.java
@@ -17,7 +17,7 @@
*/
package org.apache.knox.gateway.mock;
-import org.apache.hadoop.test.mock.MockServlet;
+import org.apache.knox.test.mock.MockServlet;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/services/security/CryptoServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/services/security/CryptoServiceTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/services/security/CryptoServiceTest.java
index 0d8b7b8..72a21b0 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/security/CryptoServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/security/CryptoServiceTest.java
@@ -21,8 +21,8 @@ import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.impl.ConfigurableEncryptor;
import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
-import org.apache.hadoop.test.category.ManualTests;
-import org.apache.hadoop.test.category.MediumTests;
+import org.apache.knox.test.category.ManualTests;
+import org.apache.knox.test.category.MediumTests;
import org.easymock.EasyMock;
import org.junit.BeforeClass;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index e70d096..408d396 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -26,7 +26,7 @@ import org.apache.commons.io.monitor.FileAlterationObserver;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.knox.gateway.topology.Param;
import org.apache.knox.gateway.topology.Provider;
import org.apache.knox.gateway.topology.Topology;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/topology/builder/PropertyTopologyBuilderTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/builder/PropertyTopologyBuilderTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/builder/PropertyTopologyBuilderTest.java
index 700ac9b..f69dc53 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/builder/PropertyTopologyBuilderTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/builder/PropertyTopologyBuilderTest.java
@@ -20,7 +20,7 @@ import java.util.Enumeration;
import org.apache.knox.gateway.topology.Topology;
import org.apache.knox.gateway.topology.builder.property.Property;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.log4j.Appender;
import org.junit.AfterClass;
import org.junit.BeforeClass;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/topology/validation/TopologyValidatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/validation/TopologyValidatorTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/validation/TopologyValidatorTest.java
index 9337c85..7c16ac4 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/validation/TopologyValidatorTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/validation/TopologyValidatorTest.java
@@ -18,7 +18,7 @@ package org.apache.knox.gateway.topology.validation;
import java.net.URL;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/topology/xml/TopologyRulesModuleTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/xml/TopologyRulesModuleTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/xml/TopologyRulesModuleTest.java
index 55c80bd..d75dcfb 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/xml/TopologyRulesModuleTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/xml/TopologyRulesModuleTest.java
@@ -25,7 +25,7 @@ import org.apache.knox.gateway.topology.Service;
import org.apache.knox.gateway.topology.Topology;
import org.apache.knox.gateway.topology.Version;
import org.apache.knox.gateway.topology.builder.TopologyBuilder;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
index 25891bf..3aceadd 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/BadUrlTest.java
@@ -42,7 +42,7 @@ import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
import org.apache.knox.gateway.topology.TopologyEvent;
import org.apache.knox.gateway.topology.TopologyListener;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.easymock.EasyMock;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
index da24b98..268e14b 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketEchoTest.java
@@ -45,7 +45,7 @@ import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
import org.apache.knox.gateway.topology.TopologyEvent;
import org.apache.knox.gateway.topology.TopologyListener;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.easymock.EasyMock;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
index 1b98616..42bc9c3 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/WebsocketMultipleConnectionTest.java
@@ -48,7 +48,7 @@ import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
import org.apache.knox.gateway.topology.TopologyEvent;
import org.apache.knox.gateway.topology.TopologyListener;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.easymock.EasyMock;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-server/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/log4j.properties b/gateway-server/src/test/resources/log4j.properties
index b212231..f35213e 100644
--- a/gateway-server/src/test/resources/log4j.properties
+++ b/gateway-server/src/test/resources/log4j.properties
@@ -24,7 +24,7 @@ log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%5p [%c] %m%n
log4j.logger.audit = INFO, collectappender
-log4j.appender.collectappender = org.apache.hadoop.test.log.CollectAppender
+log4j.appender.collectappender = org.apache.knox.test.log.CollectAppender
#log4j.logger.org.apache.knox.gateway=DEBUG
#log4j.logger.org.eclipse.jetty=DEBUG
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-service-hbase/src/test/java/org/apache/knox/gateway/hbase/HBaseDispatchTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-hbase/src/test/java/org/apache/knox/gateway/hbase/HBaseDispatchTest.java b/gateway-service-hbase/src/test/java/org/apache/knox/gateway/hbase/HBaseDispatchTest.java
index 526b0e7..e5bae02 100644
--- a/gateway-service-hbase/src/test/java/org/apache/knox/gateway/hbase/HBaseDispatchTest.java
+++ b/gateway-service-hbase/src/test/java/org/apache/knox/gateway/hbase/HBaseDispatchTest.java
@@ -21,9 +21,9 @@ import java.net.URI;
import javax.servlet.http.HttpServletRequest;
import org.apache.knox.gateway.dispatch.Dispatch;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.easymock.EasyMock;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-spi/src/test/java/org/apache/knox/gateway/dispatch/DefaultDispatchTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/dispatch/DefaultDispatchTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/dispatch/DefaultDispatchTest.java
index 99e3a33..9d0afc9 100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/dispatch/DefaultDispatchTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/dispatch/DefaultDispatchTest.java
@@ -40,9 +40,9 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.servlet.SynchronousServletOutputStreamAdapter;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.apache.http.HttpEntity;
import org.apache.http.HttpVersion;
import org.apache.http.RequestLine;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-spi/src/test/java/org/apache/knox/gateway/security/principal/PrincipalMapperTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/security/principal/PrincipalMapperTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/security/principal/PrincipalMapperTest.java
index cba7f1c..66b7b46 100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/security/principal/PrincipalMapperTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/security/principal/PrincipalMapperTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.security.principal;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-spi/src/test/java/org/apache/knox/gateway/services/hostmap/FileBasedHostMapperTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/services/hostmap/FileBasedHostMapperTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/services/hostmap/FileBasedHostMapperTest.java
index 99be7b7..be4d798 100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/services/hostmap/FileBasedHostMapperTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/hostmap/FileBasedHostMapperTest.java
@@ -17,7 +17,7 @@
*/
package org.apache.knox.gateway.services.hostmap;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.junit.Test;
import java.net.URL;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
index 7386f74..28c5ea6 100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
@@ -26,8 +26,8 @@ import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.KeystoreServiceException;
import org.apache.knox.gateway.services.security.MasterService;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFMasterServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFMasterServiceTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFMasterServiceTest.java
index 275b090..d3449c0 100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFMasterServiceTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFMasterServiceTest.java
@@ -20,8 +20,8 @@ package org.apache.knox.gateway.services.security.impl;
import java.io.File;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestDriver.java b/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestDriver.java
index dd4216f..3dcd02f 100644
--- a/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestDriver.java
+++ b/gateway-test-release-utils/src/main/java/org/apache/knox/gateway/GatewayTestDriver.java
@@ -43,7 +43,7 @@ import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.mock.MockServer;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/knox/gateway/SecureClusterTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/knox/gateway/SecureClusterTest.java b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/knox/gateway/SecureClusterTest.java
index dca2a19..5395a82 100644
--- a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/knox/gateway/SecureClusterTest.java
+++ b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/knox/gateway/SecureClusterTest.java
@@ -33,8 +33,8 @@ import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.auth.AuthScope;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-release/webhdfs-test/src/test/java/org/apache/knox/gateway/ShellTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/java/org/apache/knox/gateway/ShellTest.java b/gateway-test-release/webhdfs-test/src/test/java/org/apache/knox/gateway/ShellTest.java
index 455af41..b4f5c1b 100644
--- a/gateway-test-release/webhdfs-test/src/test/java/org/apache/knox/gateway/ShellTest.java
+++ b/gateway-test-release/webhdfs-test/src/test/java/org/apache/knox/gateway/ShellTest.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.knox.gateway;
-import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.knox.test.category.ReleaseTest;
import org.junit.experimental.categories.Category;
import java.io.File;
@@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.log4j.PropertyConfigurator;
import org.junit.AfterClass;
import org.junit.BeforeClass;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/Console.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/Console.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/Console.java
deleted file mode 100644
index e5bce70..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/Console.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test;
-
-import java.io.ByteArrayOutputStream;
-import java.io.PrintStream;
-
-public class Console {
-
- PrintStream oldOut, newOut;
- PrintStream oldErr, newErr;
- ByteArrayOutputStream newOutBuf, newErrBuf;
-
- public void capture() {
- oldErr = System.err;
- newErrBuf = new ByteArrayOutputStream();
- newErr = new PrintStream( newErrBuf );
-
- oldOut = System.out; // I18N not required.
- newOutBuf = new ByteArrayOutputStream();
- newOut = new PrintStream( newOutBuf );
-
- System.setErr( newErr );
- System.setOut( newOut );
- }
-
- public byte[] getOut() {
- return newOutBuf.toByteArray();
- }
-
- public byte[] getErr() {
- return newErrBuf.toByteArray();
- }
-
- public void release() {
- System.setErr( oldErr );
- System.setOut( oldOut );
- newErr.close();
- newOut.close();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
deleted file mode 100644
index 076c312..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
+++ /dev/null
@@ -1,216 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.StringWriter;
-import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.URL;
-import java.nio.ByteBuffer;
-import java.util.Properties;
-import java.util.UUID;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.log4j.Logger;
-import org.apache.velocity.Template;
-import org.apache.velocity.VelocityContext;
-import org.apache.velocity.app.VelocityEngine;
-import org.apache.velocity.runtime.RuntimeConstants;
-import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
-import org.eclipse.jetty.http.HttpTester;
-import org.eclipse.jetty.servlet.ServletTester;
-
-public class TestUtils {
-
- private static Logger LOG = Logger.getLogger(TestUtils.class);
-
- public static final long SHORT_TIMEOUT = 1000L;
- public static final long MEDIUM_TIMEOUT = 20 * 1000L;
- public static final long LONG_TIMEOUT = 60 * 1000L;
-
- public static String getResourceName( Class clazz, String name ) {
- name = clazz.getName().replaceAll( "\\.", "/" ) + "/" + name;
- return name;
- }
-
- public static URL getResourceUrl( Class clazz, String name ) throws FileNotFoundException {
- name = getResourceName( clazz, name );
- URL url = ClassLoader.getSystemResource( name );
- if( url == null ) {
- throw new FileNotFoundException( name );
- }
- return url;
- }
-
- public static URL getResourceUrl( String name ) throws FileNotFoundException {
- URL url = ClassLoader.getSystemResource( name );
- if( url == null ) {
- throw new FileNotFoundException( name );
- }
- return url;
- }
-
- public static InputStream getResourceStream( String name ) throws IOException {
- URL url = ClassLoader.getSystemResource( name );
- InputStream stream = url.openStream();
- return stream;
- }
-
- public static InputStream getResourceStream( Class clazz, String name ) throws IOException {
- URL url = getResourceUrl( clazz, name );
- InputStream stream = url.openStream();
- return stream;
- }
-
- public static Reader getResourceReader( String name, String charset ) throws IOException {
- return new InputStreamReader( getResourceStream( name ), charset );
- }
-
- public static Reader getResourceReader( Class clazz, String name, String charset ) throws IOException {
- return new InputStreamReader( getResourceStream( clazz, name ), charset );
- }
-
- public static String getResourceString( Class clazz, String name, String charset ) throws IOException {
- return IOUtils.toString( getResourceReader( clazz, name, charset ) );
- }
-
- public static File createTempDir( String prefix ) throws IOException {
- File targetDir = new File( System.getProperty( "user.dir" ), "target" );
- File tempDir = new File( targetDir, prefix + UUID.randomUUID() );
- FileUtils.forceMkdir( tempDir );
- return tempDir;
- }
-
- public static void LOG_ENTER() {
- StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
- System.out.flush();
- System.out.println( String.format( "Running %s#%s", caller.getClassName(), caller.getMethodName() ) );
- System.out.flush();
- }
-
- public static void LOG_EXIT() {
- StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
- System.out.flush();
- System.out.println( String.format( "Exiting %s#%s", caller.getClassName(), caller.getMethodName() ) );
- System.out.flush();
- }
-
- public static void awaitPortOpen( InetSocketAddress address, int timeout, int delay ) throws InterruptedException {
- long maxTime = System.currentTimeMillis() + timeout;
- do {
- try {
- Socket socket = new Socket();
- socket.connect( address, delay );
- socket.close();
- return;
- } catch ( IOException e ) {
- //e.printStackTrace();
- }
- } while( System.currentTimeMillis() < maxTime );
- throw new IllegalStateException( "Timed out " + timeout + " waiting for port " + address );
- }
-
- public static void awaitNon404HttpStatus( URL url, int timeout, int delay ) throws InterruptedException {
- long maxTime = System.currentTimeMillis() + timeout;
- do {
- Thread.sleep( delay );
- HttpURLConnection conn = null;
- try {
- conn = (HttpURLConnection)url.openConnection();
- conn.getInputStream().close();
- return;
- } catch ( IOException e ) {
- //e.printStackTrace();
- try {
- if( conn != null && conn.getResponseCode() != 404 ) {
- return;
- }
- } catch ( IOException ee ) {
- //ee.printStackTrace();
- }
- }
- } while( System.currentTimeMillis() < maxTime );
- throw new IllegalStateException( "Timed out " + timeout + " waiting for URL " + url );
- }
-
- public static String merge( String resource, Properties properties ) {
- ClasspathResourceLoader loader = new ClasspathResourceLoader();
- loader.getResourceStream( resource );
-
- VelocityEngine engine = new VelocityEngine();
- Properties config = new Properties();
- config.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
- config.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
- config.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
- engine.init( config );
-
- VelocityContext context = new VelocityContext( properties );
- Template template = engine.getTemplate( resource );
- StringWriter writer = new StringWriter();
- template.merge( context, writer );
- return writer.toString();
- }
-
- public static String merge( Class base, String resource, Properties properties ) {
- String baseResource = base.getName().replaceAll( "\\.", "/" );
- String fullResource = baseResource + "/" + resource;
- return merge( fullResource, properties );
- }
-
- public static int findFreePort() throws IOException {
- ServerSocket socket = new ServerSocket(0);
- int port = socket.getLocalPort();
- socket.close();
- return port;
- }
-
- public static void waitUntilNextSecond() {
- long before = System.currentTimeMillis();
- long wait;
- while( ( wait = ( 1000 - ( System.currentTimeMillis() - before ) ) ) > 0 ) {
- try {
- Thread.sleep( wait );
- } catch( InterruptedException e ) {
- // Ignore.
- }
- }
- }
-
- public static HttpTester.Response execute( ServletTester server, HttpTester.Request request ) throws Exception {
- LOG.debug( "execute: request=" + request );
- ByteBuffer requestBuffer = request.generate();
- LOG.trace( "execute: requestBuffer=[" + new String(requestBuffer.array(),0,requestBuffer.limit()) + "]" );
- ByteBuffer responseBuffer = server.getResponses( requestBuffer, 30, TimeUnit.SECONDS );
- HttpTester.Response response = HttpTester.parseResponse( responseBuffer );
- LOG.trace( "execute: responseBuffer=[" + new String(responseBuffer.array(),0,responseBuffer.limit()) + "]" );
- LOG.debug( "execute: reponse=" + response );
- return response;
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FastTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FastTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FastTests.java
deleted file mode 100644
index 7761430..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FastTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface FastTests {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ManualTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ManualTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ManualTests.java
deleted file mode 100644
index 840dbb3..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ManualTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface ManualTests {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/MediumTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/MediumTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/MediumTests.java
deleted file mode 100644
index 5cec811..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/MediumTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface MediumTests {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
deleted file mode 100644
index bd52807..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface ReleaseTest {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/SlowTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/SlowTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/SlowTests.java
deleted file mode 100644
index d395b02..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/SlowTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface SlowTests {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/UnitTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/UnitTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/UnitTests.java
deleted file mode 100644
index f36d539..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/UnitTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface UnitTests {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
deleted file mode 100644
index 0b0acaa..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface VerifyTest {
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/CollectAppender.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/CollectAppender.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/CollectAppender.java
deleted file mode 100644
index d14ab7b..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/CollectAppender.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.log;
-
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.spi.LoggingEvent;
-
-public class CollectAppender extends AppenderSkeleton {
-
- public CollectAppender() {
- super();
- }
-
- public static BlockingQueue<LoggingEvent> queue = new LinkedBlockingQueue<LoggingEvent>();
- public static boolean closed = false;
-
- @Override
- protected void append( LoggingEvent event ) {
- event.getProperties();
- queue.add( event );
- }
-
- @Override
- public void close() {
- closed = true;
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpAppender.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpAppender.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpAppender.java
deleted file mode 100644
index 647f0e2..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpAppender.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.log;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.ErrorHandler;
-import org.apache.log4j.spi.Filter;
-import org.apache.log4j.spi.LoggingEvent;
-
-import java.util.Enumeration;
-
-public class NoOpAppender implements Appender {
-
- public static Enumeration<Appender> setUp() {
- Enumeration<Appender> appenders = (Enumeration<Appender>)Logger.getRootLogger().getAllAppenders();
- Logger.getRootLogger().removeAllAppenders();
- Logger.getRootLogger().addAppender( new NoOpAppender() );
- return appenders;
- }
-
- public static void tearDown( Enumeration<Appender> appenders ) {
- if( appenders != null ) {
- while( appenders.hasMoreElements() ) {
- Logger.getRootLogger().addAppender( appenders.nextElement() );
- }
- }
- }
-
- @Override
- public void addFilter( Filter newFilter ) {
- }
-
- @Override
- public Filter getFilter() {
- return null;
- }
-
- @Override
- public void clearFilters() {
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void doAppend( LoggingEvent event ) {
- }
-
- @Override
- public String getName() {
- return this.getClass().getName();
- }
-
- @Override
- public void setErrorHandler( ErrorHandler errorHandler ) {
- }
-
- @Override
- public ErrorHandler getErrorHandler() {
- return null;
- }
-
- @Override
- public void setLayout( Layout layout ) {
- }
-
- @Override
- public Layout getLayout() {
- return null;
- }
-
- @Override
- public void setName( String name ) {
- }
-
- @Override
- public boolean requiresLayout() {
- return false;
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpLogger.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpLogger.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpLogger.java
deleted file mode 100644
index 8fd24ed..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/log/NoOpLogger.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.log;
-
-import org.eclipse.jetty.util.log.Logger;
-
-public class NoOpLogger implements Logger {
-
- @Override
- public String getName() {
- return "";
- }
-
- @Override
- public void warn( String msg, Object... args ) {
- }
-
- @Override
- public void warn( Throwable thrown ) {
- }
-
- @Override
- public void warn( String msg, Throwable thrown ) {
- }
-
- @Override
- public void info( String msg, Object... args ) {
- }
-
- @Override
- public void info( Throwable thrown ) {
- }
-
- @Override
- public void info( String msg, Throwable thrown ) {
- }
-
- @Override
- public boolean isDebugEnabled() {
- return false;
- }
-
- @Override
- public void setDebugEnabled( boolean enabled ) {
- }
-
- @Override
- public void debug( String msg, Object... args ) {
- }
-
- @Override
- public void debug( String msg, long arg ) {
- }
-
- @Override
- public void debug( Throwable thrown ) {
- }
-
- @Override
- public void debug( String msg, Throwable thrown ) {
- }
-
- @Override
- public Logger getLogger( String name ) {
- return this;
- }
-
- @Override
- public void ignore( Throwable ignored ) {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockFilterConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockFilterConfig.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockFilterConfig.java
deleted file mode 100644
index eae1ef6..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/mock/MockFilterConfig.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.mock;
-
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import java.util.Enumeration;
-
-public class MockFilterConfig implements FilterConfig {
-
- @Override
- public String getFilterName() {
- return null;
- }
-
- @Override
- public ServletContext getServletContext() {
- return null;
- }
-
- @Override
- public String getInitParameter( String s ) {
- return null;
- }
-
- @Override
- public Enumeration<String> getInitParameterNames() {
- return null;
- }
-
-}
[21/25] knox git commit: KNOX-998 - Merge from master
Posted by mo...@apache.org.
KNOX-998 - Merge from master
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/46109ad8
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/46109ad8
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/46109ad8
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 46109ad8563ea2286ca7e4756410e7753b2fd9cb
Parents: c754cc0
Author: Sandeep More <mo...@apache.org>
Authored: Thu Nov 2 10:37:58 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Nov 2 10:37:58 2017 -0400
----------------------------------------------------------------------
.../security/ldap/BaseDirectoryService.java | 0
.../ldap/BaseDirectoryServiceFactory.java | 0
.../ldap/SimpleDirectoryServiceFactory.java | 0
.../ambari/AmbariServiceDiscoveryMessages.java | 2 +-
.../webappsec/filter/StrictTranportFilter.java | 137 ----------
.../webappsec/deploy/WebAppSecContributor.java | 2 +-
.../webappsec/filter/StrictTranportFilter.java | 137 ++++++++++
.../webappsec/StrictTranportFilterTest.java | 164 ------------
.../webappsec/StrictTranportFilterTest.java | 164 ++++++++++++
.../org/apache/knox/gateway/GatewayFilter.java | 2 +-
.../impl/DefaultTokenAuthorityServiceTest.java | 254 -------------------
.../apache/knox/gateway/GatewayFilterTest.java | 2 +-
.../impl/DefaultTokenAuthorityServiceTest.java | 254 +++++++++++++++++++
.../topology/DefaultTopologyServiceTest.java | 20 +-
.../simple/SimpleDescriptorHandlerTest.java | 2 +-
.../topology/file/provider-config-one.xml | 74 ------
.../topology/file/simple-descriptor-five.json | 14 -
.../topology/file/simple-descriptor-six.json | 18 --
.../topology/file/ambari-cluster-policy.xml | 4 +-
.../topology/file/provider-config-one.xml | 74 ++++++
.../topology/file/simple-descriptor-five.json | 14 +
.../topology/file/simple-descriptor-six.json | 18 ++
.../service/admin/HrefListingMarshaller.java | 75 ------
.../service/admin/HrefListingMarshaller.java | 75 ++++++
.../service/admin/TopologiesResource.java | 2 +-
.../services/ambariui/2.2.1/service.xml | 2 +-
26 files changed, 755 insertions(+), 755 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index d91edef..2bdc94b 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -21,7 +21,7 @@ import org.apache.knox.gateway.i18n.messages.MessageLevel;
import org.apache.knox.gateway.i18n.messages.Messages;
import org.apache.knox.gateway.i18n.messages.StackTrace;
-@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
+@Messages(logger="org.apache.knox.gateway.topology.discovery.ambari")
public interface AmbariServiceDiscoveryMessages {
@Message(level = MessageLevel.ERROR,
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
deleted file mode 100644
index 28ac18a..0000000
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec.filter;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-
-/**
- * This filter protects proxied webapps from protocol downgrade attacks
- * and cookie hijacking.
- */
-public class StrictTranportFilter implements Filter {
- private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
- private static final String CUSTOM_HEADER_PARAM = "strict.transport";
-
- private String option = "max-age=31536000";
-
- /* (non-Javadoc)
- * @see javax.servlet.Filter#destroy()
- */
- @Override
- public void destroy() {
- }
-
- /* (non-Javadoc)
- * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
- */
- @Override
- public void doFilter(ServletRequest req, ServletResponse res,
- FilterChain chain) throws IOException, ServletException {
- ((HttpServletResponse) res).setHeader(STRICT_TRANSPORT, option);
- chain.doFilter(req, new StrictTranportResponseWrapper((HttpServletResponse) res));
- }
-
- /* (non-Javadoc)
- * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
- */
- @Override
- public void init(FilterConfig config) throws ServletException {
- String customOption = config.getInitParameter(CUSTOM_HEADER_PARAM);
- if (customOption != null) {
- option = customOption;
- }
- }
-
- public class StrictTranportResponseWrapper extends HttpServletResponseWrapper {
- @Override
- public void addHeader(String name, String value) {
- // don't allow additional values to be added to
- // the configured options value in topology
- if (!name.equals(STRICT_TRANSPORT)) {
- super.addHeader(name, value);
- }
- }
-
- @Override
- public void setHeader(String name, String value) {
- // don't allow overwriting of configured value
- if (!name.equals(STRICT_TRANSPORT)) {
- super.setHeader(name, value);
- }
- }
-
- /**
- * construct a wrapper for this request
- *
- * @param request
- */
- public StrictTranportResponseWrapper(HttpServletResponse response) {
- super(response);
- }
-
- @Override
- public String getHeader(String name) {
- String headerValue = null;
- if (name.equals(STRICT_TRANSPORT)) {
- headerValue = option;
- }
- else {
- headerValue = super.getHeader(name);
- }
- return headerValue;
- }
-
- /**
- * get the Header names
- */
- @Override
- public Collection<String> getHeaderNames() {
- List<String> names = (List<String>) super.getHeaderNames();
- if (names == null) {
- names = new ArrayList<String>();
- }
- names.add(STRICT_TRANSPORT);
- return names;
- }
-
- @Override
- public Collection<String> getHeaders(String name) {
- List<String> values = (List<String>) super.getHeaders(name);
- if (name.equals(STRICT_TRANSPORT)) {
- if (values == null) {
- values = new ArrayList<String>();
- }
- values.add(option);
- }
- return values;
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
index 17fb8c2..71a5af9 100644
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
@@ -43,7 +43,7 @@ public class WebAppSecContributor extends
private static final String XFRAME_OPTIONS_FILTER_CLASSNAME = "org.apache.knox.gateway.webappsec.filter.XFrameOptionsFilter";
private static final String XFRAME_OPTIONS_ENABLED = "xframe.options.enabled";
private static final String STRICT_TRANSPORT_SUFFIX = "_STRICTTRANSPORT";
- private static final String STRICT_TRANSPORT_FILTER_CLASSNAME = "org.apache.hadoop.gateway.webappsec.filter.StrictTranportFilter";
+ private static final String STRICT_TRANSPORT_FILTER_CLASSNAME = "org.apache.knox.gateway.webappsec.filter.StrictTranportFilter";
private static final String STRICT_TRANSPORT_ENABLED = "strict.transport.enabled";
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/filter/StrictTranportFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/filter/StrictTranportFilter.java b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/filter/StrictTranportFilter.java
new file mode 100644
index 0000000..0856297
--- /dev/null
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/filter/StrictTranportFilter.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.webappsec.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+/**
+ * This filter protects proxied webapps from protocol downgrade attacks
+ * and cookie hijacking.
+ */
+public class StrictTranportFilter implements Filter {
+ private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
+ private static final String CUSTOM_HEADER_PARAM = "strict.transport";
+
+ private String option = "max-age=31536000";
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#destroy()
+ */
+ @Override
+ public void destroy() {
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+ */
+ @Override
+ public void doFilter(ServletRequest req, ServletResponse res,
+ FilterChain chain) throws IOException, ServletException {
+ ((HttpServletResponse) res).setHeader(STRICT_TRANSPORT, option);
+ chain.doFilter(req, new StrictTranportResponseWrapper((HttpServletResponse) res));
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+ */
+ @Override
+ public void init(FilterConfig config) throws ServletException {
+ String customOption = config.getInitParameter(CUSTOM_HEADER_PARAM);
+ if (customOption != null) {
+ option = customOption;
+ }
+ }
+
+ public class StrictTranportResponseWrapper extends HttpServletResponseWrapper {
+ @Override
+ public void addHeader(String name, String value) {
+ // don't allow additional values to be added to
+ // the configured options value in topology
+ if (!name.equals(STRICT_TRANSPORT)) {
+ super.addHeader(name, value);
+ }
+ }
+
+ @Override
+ public void setHeader(String name, String value) {
+ // don't allow overwriting of configured value
+ if (!name.equals(STRICT_TRANSPORT)) {
+ super.setHeader(name, value);
+ }
+ }
+
+ /**
+ * construct a wrapper for this request
+ *
+ * @param request
+ */
+ public StrictTranportResponseWrapper(HttpServletResponse response) {
+ super(response);
+ }
+
+ @Override
+ public String getHeader(String name) {
+ String headerValue = null;
+ if (name.equals(STRICT_TRANSPORT)) {
+ headerValue = option;
+ }
+ else {
+ headerValue = super.getHeader(name);
+ }
+ return headerValue;
+ }
+
+ /**
+ * get the Header names
+ */
+ @Override
+ public Collection<String> getHeaderNames() {
+ List<String> names = (List<String>) super.getHeaderNames();
+ if (names == null) {
+ names = new ArrayList<String>();
+ }
+ names.add(STRICT_TRANSPORT);
+ return names;
+ }
+
+ @Override
+ public Collection<String> getHeaders(String name) {
+ List<String> values = (List<String>) super.getHeaders(name);
+ if (name.equals(STRICT_TRANSPORT)) {
+ if (values == null) {
+ values = new ArrayList<String>();
+ }
+ values.add(option);
+ }
+ return values;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java b/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
deleted file mode 100644
index 0c63d7f..0000000
--- a/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec;
-
-import static org.junit.Assert.fail;
-
-import java.io.IOException;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.Properties;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.gateway.webappsec.filter.StrictTranportFilter;
-import org.easymock.EasyMock;
-import org.junit.Assert;
-import org.junit.Test;
-
-/**
- *
- */
-public class StrictTranportFilterTest {
- /**
- *
- */
- private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
- String options = null;
- Collection<String> headerNames = null;
- Collection<String> headers = null;
-
- @Test
- public void testDefaultOptionsValue() throws Exception {
- try {
- StrictTranportFilter filter = new StrictTranportFilter();
- Properties props = new Properties();
- props.put("strict.transport.enabled", "true");
- filter.init(new TestFilterConfig(props));
-
- HttpServletRequest request = EasyMock.createNiceMock(
- HttpServletRequest.class);
- HttpServletResponse response = EasyMock.createNiceMock(
- HttpServletResponse.class);
- EasyMock.replay(request);
- EasyMock.replay(response);
-
- TestFilterChain chain = new TestFilterChain();
- filter.doFilter(request, response, chain);
- Assert.assertTrue("doFilterCalled should not be false.",
- chain.doFilterCalled );
- Assert.assertTrue("Options value incorrect should be max-age=31536000 but is: "
- + options, "max-age=31536000".equals(options));
-
- Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
- } catch (ServletException se) {
- fail("Should NOT have thrown a ServletException.");
- }
- }
-
- @Test
- public void testConfiguredOptionsValue() throws Exception {
- try {
- StrictTranportFilter filter = new StrictTranportFilter();
- Properties props = new Properties();
- props.put("strict.transport.enabled", "true");
- props.put("strict.transport", "max-age=31536010; includeSubDomains");
- filter.init(new TestFilterConfig(props));
-
- HttpServletRequest request = EasyMock.createNiceMock(
- HttpServletRequest.class);
- HttpServletResponse response = EasyMock.createNiceMock(
- HttpServletResponse.class);
- EasyMock.replay(request);
- EasyMock.replay(response);
-
- TestFilterChain chain = new TestFilterChain();
- filter.doFilter(request, response, chain);
- Assert.assertTrue("doFilterCalled should not be false.",
- chain.doFilterCalled );
- Assert.assertTrue("Options value incorrect should be max-age=31536010; includeSubDomains but is: "
- + options, "max-age=31536010; includeSubDomains".equals(options));
-
- Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
- } catch (ServletException se) {
- fail("Should NOT have thrown a ServletException.");
- }
- }
-
- class TestFilterConfig implements FilterConfig {
- Properties props = null;
-
- public TestFilterConfig(Properties props) {
- this.props = props;
- }
-
- @Override
- public String getFilterName() {
- return null;
- }
-
- /* (non-Javadoc)
- * @see javax.servlet.FilterConfig#getServletContext()
- */
- @Override
- public ServletContext getServletContext() {
- return null;
- }
-
- /* (non-Javadoc)
- * @see javax.servlet.FilterConfig#getInitParameter(java.lang.String)
- */
- @Override
- public String getInitParameter(String name) {
- return props.getProperty(name, null);
- }
-
- /* (non-Javadoc)
- * @see javax.servlet.FilterConfig#getInitParameterNames()
- */
- @Override
- public Enumeration<String> getInitParameterNames() {
- return null;
- }
-
- }
-
- class TestFilterChain implements FilterChain {
- boolean doFilterCalled = false;
-
- /* (non-Javadoc)
- * @see javax.servlet.FilterChain#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse)
- */
- @Override
- public void doFilter(ServletRequest request, ServletResponse response)
- throws IOException, ServletException {
- doFilterCalled = true;
- options = ((HttpServletResponse)response).getHeader(STRICT_TRANSPORT);
- headerNames = ((HttpServletResponse)response).getHeaderNames();
- headers = ((HttpServletResponse)response).getHeaders(STRICT_TRANSPORT);
- }
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-provider-security-webappsec/src/test/java/org/apache/knox/gateway/webappsec/StrictTranportFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/test/java/org/apache/knox/gateway/webappsec/StrictTranportFilterTest.java b/gateway-provider-security-webappsec/src/test/java/org/apache/knox/gateway/webappsec/StrictTranportFilterTest.java
new file mode 100644
index 0000000..fa0b5b6
--- /dev/null
+++ b/gateway-provider-security-webappsec/src/test/java/org/apache/knox/gateway/webappsec/StrictTranportFilterTest.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.webappsec;
+
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Properties;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.knox.gateway.webappsec.filter.StrictTranportFilter;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class StrictTranportFilterTest {
+ /**
+ *
+ */
+ private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
+ String options = null;
+ Collection<String> headerNames = null;
+ Collection<String> headers = null;
+
+ @Test
+ public void testDefaultOptionsValue() throws Exception {
+ try {
+ StrictTranportFilter filter = new StrictTranportFilter();
+ Properties props = new Properties();
+ props.put("strict.transport.enabled", "true");
+ filter.init(new TestFilterConfig(props));
+
+ HttpServletRequest request = EasyMock.createNiceMock(
+ HttpServletRequest.class);
+ HttpServletResponse response = EasyMock.createNiceMock(
+ HttpServletResponse.class);
+ EasyMock.replay(request);
+ EasyMock.replay(response);
+
+ TestFilterChain chain = new TestFilterChain();
+ filter.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.",
+ chain.doFilterCalled );
+ Assert.assertTrue("Options value incorrect should be max-age=31536000 but is: "
+ + options, "max-age=31536000".equals(options));
+
+ Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testConfiguredOptionsValue() throws Exception {
+ try {
+ StrictTranportFilter filter = new StrictTranportFilter();
+ Properties props = new Properties();
+ props.put("strict.transport.enabled", "true");
+ props.put("strict.transport", "max-age=31536010; includeSubDomains");
+ filter.init(new TestFilterConfig(props));
+
+ HttpServletRequest request = EasyMock.createNiceMock(
+ HttpServletRequest.class);
+ HttpServletResponse response = EasyMock.createNiceMock(
+ HttpServletResponse.class);
+ EasyMock.replay(request);
+ EasyMock.replay(response);
+
+ TestFilterChain chain = new TestFilterChain();
+ filter.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.",
+ chain.doFilterCalled );
+ Assert.assertTrue("Options value incorrect should be max-age=31536010; includeSubDomains but is: "
+ + options, "max-age=31536010; includeSubDomains".equals(options));
+
+ Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ class TestFilterConfig implements FilterConfig {
+ Properties props = null;
+
+ public TestFilterConfig(Properties props) {
+ this.props = props;
+ }
+
+ @Override
+ public String getFilterName() {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getServletContext()
+ */
+ @Override
+ public ServletContext getServletContext() {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameter(java.lang.String)
+ */
+ @Override
+ public String getInitParameter(String name) {
+ return props.getProperty(name, null);
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameterNames()
+ */
+ @Override
+ public Enumeration<String> getInitParameterNames() {
+ return null;
+ }
+
+ }
+
+ class TestFilterChain implements FilterChain {
+ boolean doFilterCalled = false;
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterChain#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse)
+ */
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response)
+ throws IOException, ServletException {
+ doFilterCalled = true;
+ options = ((HttpServletResponse)response).getHeader(STRICT_TRANSPORT);
+ headerNames = ((HttpServletResponse)response).getHeaderNames();
+ headers = ((HttpServletResponse)response).getHeaders(STRICT_TRANSPORT);
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
index 8dd29bf..25d4f75 100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
@@ -127,7 +127,7 @@ public class GatewayFilter implements Filter {
// if there was no match then look for a default service for the topology
if (match == null) {
- Topology topology = (Topology) servletRequest.getServletContext().getAttribute("org.apache.hadoop.gateway.topology");
+ Topology topology = (Topology) servletRequest.getServletContext().getAttribute("org.apache.knox.gateway.topology");
if (topology != null) {
String defaultServicePath = topology.getDefaultServicePath();
if (defaultServicePath != null) {
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
deleted file mode 100644
index da55422..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.knox.gateway.services.token.impl;
-
-import java.io.File;
-import java.security.Principal;
-import java.util.HashMap;
-
-import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.knox.gateway.services.security.KeystoreService;
-import org.apache.knox.gateway.services.security.MasterService;
-import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
-import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
-import org.apache.knox.gateway.services.security.token.impl.JWT;
-import org.apache.knox.gateway.services.security.token.TokenServiceException;
-
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-/**
- * Some unit tests for the DefaultTokenAuthorityService.
- */
-public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
-
- @Test
- public void testTokenCreation() throws Exception {
-
- Principal principal = EasyMock.createNiceMock(Principal.class);
- EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
-
- GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
- String basedir = System.getProperty("basedir");
- if (basedir == null) {
- basedir = new File(".").getCanonicalPath();
- }
-
- EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
- EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
- EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
-
- MasterService ms = EasyMock.createNiceMock(MasterService.class);
- EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
-
- AliasService as = EasyMock.createNiceMock(AliasService.class);
- EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
-
- EasyMock.replay(principal, config, ms, as);
-
- KeystoreService ks = new DefaultKeystoreService();
- ((DefaultKeystoreService)ks).setMasterService(ms);
-
- ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
-
- JWTokenAuthority ta = new DefaultTokenAuthorityService();
- ((DefaultTokenAuthorityService)ta).setAliasService(as);
- ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
-
- ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
-
- JWT token = ta.issueToken(principal, "RS256");
- assertEquals("KNOXSSO", token.getIssuer());
- assertEquals("john.doe@example.com", token.getSubject());
-
- assertTrue(ta.verifyToken(token));
- }
-
- @Test
- public void testTokenCreationAudience() throws Exception {
-
- Principal principal = EasyMock.createNiceMock(Principal.class);
- EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
-
- GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
- String basedir = System.getProperty("basedir");
- if (basedir == null) {
- basedir = new File(".").getCanonicalPath();
- }
-
- EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
- EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
- EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
-
- MasterService ms = EasyMock.createNiceMock(MasterService.class);
- EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
-
- AliasService as = EasyMock.createNiceMock(AliasService.class);
- EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
-
- EasyMock.replay(principal, config, ms, as);
-
- KeystoreService ks = new DefaultKeystoreService();
- ((DefaultKeystoreService)ks).setMasterService(ms);
-
- ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
-
- JWTokenAuthority ta = new DefaultTokenAuthorityService();
- ((DefaultTokenAuthorityService)ta).setAliasService(as);
- ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
-
- ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
-
- JWT token = ta.issueToken(principal, "https://login.example.com", "RS256");
- assertEquals("KNOXSSO", token.getIssuer());
- assertEquals("john.doe@example.com", token.getSubject());
- assertEquals("https://login.example.com", token.getAudience());
-
- assertTrue(ta.verifyToken(token));
- }
-
- @Test
- public void testTokenCreationNullAudience() throws Exception {
-
- Principal principal = EasyMock.createNiceMock(Principal.class);
- EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
-
- GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
- String basedir = System.getProperty("basedir");
- if (basedir == null) {
- basedir = new File(".").getCanonicalPath();
- }
-
- EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
- EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
- EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
-
- MasterService ms = EasyMock.createNiceMock(MasterService.class);
- EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
-
- AliasService as = EasyMock.createNiceMock(AliasService.class);
- EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
-
- EasyMock.replay(principal, config, ms, as);
-
- KeystoreService ks = new DefaultKeystoreService();
- ((DefaultKeystoreService)ks).setMasterService(ms);
-
- ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
-
- JWTokenAuthority ta = new DefaultTokenAuthorityService();
- ((DefaultTokenAuthorityService)ta).setAliasService(as);
- ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
-
- ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
-
- JWT token = ta.issueToken(principal, null, "RS256");
- assertEquals("KNOXSSO", token.getIssuer());
- assertEquals("john.doe@example.com", token.getSubject());
-
- assertTrue(ta.verifyToken(token));
- }
-
- @Test
- public void testTokenCreationSignatureAlgorithm() throws Exception {
-
- Principal principal = EasyMock.createNiceMock(Principal.class);
- EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
-
- GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
- String basedir = System.getProperty("basedir");
- if (basedir == null) {
- basedir = new File(".").getCanonicalPath();
- }
-
- EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
- EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
- EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
-
- MasterService ms = EasyMock.createNiceMock(MasterService.class);
- EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
-
- AliasService as = EasyMock.createNiceMock(AliasService.class);
- EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
-
- EasyMock.replay(principal, config, ms, as);
-
- KeystoreService ks = new DefaultKeystoreService();
- ((DefaultKeystoreService)ks).setMasterService(ms);
-
- ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
-
- JWTokenAuthority ta = new DefaultTokenAuthorityService();
- ((DefaultTokenAuthorityService)ta).setAliasService(as);
- ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
-
- ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
-
- JWT token = ta.issueToken(principal, "RS512");
- assertEquals("KNOXSSO", token.getIssuer());
- assertEquals("john.doe@example.com", token.getSubject());
- assertTrue(token.getHeader().contains("RS512"));
-
- assertTrue(ta.verifyToken(token));
- }
-
- @Test
- public void testTokenCreationBadSignatureAlgorithm() throws Exception {
-
- Principal principal = EasyMock.createNiceMock(Principal.class);
- EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
-
- GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
- String basedir = System.getProperty("basedir");
- if (basedir == null) {
- basedir = new File(".").getCanonicalPath();
- }
-
- EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
- EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
- EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
-
- MasterService ms = EasyMock.createNiceMock(MasterService.class);
- EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
-
- AliasService as = EasyMock.createNiceMock(AliasService.class);
- EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
-
- EasyMock.replay(principal, config, ms, as);
-
- KeystoreService ks = new DefaultKeystoreService();
- ((DefaultKeystoreService)ks).setMasterService(ms);
-
- ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
-
- JWTokenAuthority ta = new DefaultTokenAuthorityService();
- ((DefaultTokenAuthorityService)ta).setAliasService(as);
- ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
-
- ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
-
- try {
- ta.issueToken(principal, "none");
- fail("Failure expected on a bad signature algorithm");
- } catch (TokenServiceException ex) {
- // expected
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
index ac22400..2fe1f1a 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
@@ -196,7 +196,7 @@ public class GatewayFilterTest {
"Custom-Forwarded-For").anyTimes();
EasyMock.expect( request.getRequestURL() ).andReturn( new StringBuffer("http://host:8443/gateway/sandbox/test-path/test-resource/") ).anyTimes();
- EasyMock.expect( context.getAttribute( "org.apache.hadoop.gateway.topology" ) ).andReturn( topology ).anyTimes();
+ EasyMock.expect( context.getAttribute( "org.apache.knox.gateway.topology" ) ).andReturn( topology ).anyTimes();
EasyMock.replay( request );
EasyMock.replay( context );
EasyMock.replay( topology );
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
new file mode 100644
index 0000000..da55422
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/token/impl/DefaultTokenAuthorityServiceTest.java
@@ -0,0 +1,254 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.token.impl;
+
+import java.io.File;
+import java.security.Principal;
+import java.util.HashMap;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.KeystoreService;
+import org.apache.knox.gateway.services.security.MasterService;
+import org.apache.knox.gateway.services.security.impl.DefaultKeystoreService;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+/**
+ * Some unit tests for the DefaultTokenAuthorityService.
+ */
+public class DefaultTokenAuthorityServiceTest extends org.junit.Assert {
+
+ @Test
+ public void testTokenCreation() throws Exception {
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+ EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+ EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+ MasterService ms = EasyMock.createNiceMock(MasterService.class);
+ EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+ AliasService as = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+ EasyMock.replay(principal, config, ms, as);
+
+ KeystoreService ks = new DefaultKeystoreService();
+ ((DefaultKeystoreService)ks).setMasterService(ms);
+
+ ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+ JWTokenAuthority ta = new DefaultTokenAuthorityService();
+ ((DefaultTokenAuthorityService)ta).setAliasService(as);
+ ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+ ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+ JWT token = ta.issueToken(principal, "RS256");
+ assertEquals("KNOXSSO", token.getIssuer());
+ assertEquals("john.doe@example.com", token.getSubject());
+
+ assertTrue(ta.verifyToken(token));
+ }
+
+ @Test
+ public void testTokenCreationAudience() throws Exception {
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+ EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+ EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+ MasterService ms = EasyMock.createNiceMock(MasterService.class);
+ EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+ AliasService as = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+ EasyMock.replay(principal, config, ms, as);
+
+ KeystoreService ks = new DefaultKeystoreService();
+ ((DefaultKeystoreService)ks).setMasterService(ms);
+
+ ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+ JWTokenAuthority ta = new DefaultTokenAuthorityService();
+ ((DefaultTokenAuthorityService)ta).setAliasService(as);
+ ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+ ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+ JWT token = ta.issueToken(principal, "https://login.example.com", "RS256");
+ assertEquals("KNOXSSO", token.getIssuer());
+ assertEquals("john.doe@example.com", token.getSubject());
+ assertEquals("https://login.example.com", token.getAudience());
+
+ assertTrue(ta.verifyToken(token));
+ }
+
+ @Test
+ public void testTokenCreationNullAudience() throws Exception {
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+ EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+ EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+ MasterService ms = EasyMock.createNiceMock(MasterService.class);
+ EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+ AliasService as = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+ EasyMock.replay(principal, config, ms, as);
+
+ KeystoreService ks = new DefaultKeystoreService();
+ ((DefaultKeystoreService)ks).setMasterService(ms);
+
+ ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+ JWTokenAuthority ta = new DefaultTokenAuthorityService();
+ ((DefaultTokenAuthorityService)ta).setAliasService(as);
+ ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+ ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+ JWT token = ta.issueToken(principal, null, "RS256");
+ assertEquals("KNOXSSO", token.getIssuer());
+ assertEquals("john.doe@example.com", token.getSubject());
+
+ assertTrue(ta.verifyToken(token));
+ }
+
+ @Test
+ public void testTokenCreationSignatureAlgorithm() throws Exception {
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+ EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+ EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+ MasterService ms = EasyMock.createNiceMock(MasterService.class);
+ EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+ AliasService as = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+ EasyMock.replay(principal, config, ms, as);
+
+ KeystoreService ks = new DefaultKeystoreService();
+ ((DefaultKeystoreService)ks).setMasterService(ms);
+
+ ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+ JWTokenAuthority ta = new DefaultTokenAuthorityService();
+ ((DefaultTokenAuthorityService)ta).setAliasService(as);
+ ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+ ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+ JWT token = ta.issueToken(principal, "RS512");
+ assertEquals("KNOXSSO", token.getIssuer());
+ assertEquals("john.doe@example.com", token.getSubject());
+ assertTrue(token.getHeader().contains("RS512"));
+
+ assertTrue(ta.verifyToken(token));
+ }
+
+ @Test
+ public void testTokenCreationBadSignatureAlgorithm() throws Exception {
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("john.doe@example.com");
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ String basedir = System.getProperty("basedir");
+ if (basedir == null) {
+ basedir = new File(".").getCanonicalPath();
+ }
+
+ EasyMock.expect(config.getGatewaySecurityDir()).andReturn(basedir + "/target/test-classes");
+ EasyMock.expect(config.getSigningKeystoreName()).andReturn("server-keystore.jks");
+ EasyMock.expect(config.getSigningKeyAlias()).andReturn("server").anyTimes();
+
+ MasterService ms = EasyMock.createNiceMock(MasterService.class);
+ EasyMock.expect(ms.getMasterSecret()).andReturn("horton".toCharArray());
+
+ AliasService as = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(as.getGatewayIdentityPassphrase()).andReturn("horton".toCharArray());
+
+ EasyMock.replay(principal, config, ms, as);
+
+ KeystoreService ks = new DefaultKeystoreService();
+ ((DefaultKeystoreService)ks).setMasterService(ms);
+
+ ((DefaultKeystoreService)ks).init(config, new HashMap<String, String>());
+
+ JWTokenAuthority ta = new DefaultTokenAuthorityService();
+ ((DefaultTokenAuthorityService)ta).setAliasService(as);
+ ((DefaultTokenAuthorityService)ta).setKeystoreService(ks);
+
+ ((DefaultTokenAuthorityService)ta).init(config, new HashMap<String, String>());
+
+ try {
+ ta.issueToken(principal, "none");
+ fail("Failure expected on a bad signature algorithm");
+ } catch (TokenServiceException ex) {
+ // expected
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index 95d6f9d..e70d096 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -25,17 +25,13 @@ import org.apache.commons.io.monitor.FileAlterationMonitor;
import org.apache.commons.io.monitor.FileAlterationObserver;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
-import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
-import org.apache.knox.gateway.topology.*;
import org.apache.hadoop.test.TestUtils;
import org.apache.knox.gateway.topology.Param;
import org.apache.knox.gateway.topology.Provider;
import org.apache.knox.gateway.topology.Topology;
import org.apache.knox.gateway.topology.TopologyEvent;
import org.apache.knox.gateway.topology.TopologyListener;
-import org.apache.knox.gateway.services.security.AliasService;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
@@ -192,7 +188,7 @@ public class DefaultTopologyServiceTest {
* Test the lifecycle relationship between simple descriptors and topology files.
*
* N.B. This test depends on the DummyServiceDiscovery extension being configured:
- * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ * org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
*/
@Test
public void testSimpleDescriptorsTopologyGeneration() throws Exception {
@@ -313,7 +309,7 @@ public class DefaultTopologyServiceTest {
* Test the lifecycle relationship between provider configuration files, simple descriptors, and topology files.
*
* N.B. This test depends on the DummyServiceDiscovery extension being configured:
- * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ * org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
*/
@Test
public void testTopologiesUpdateFromProviderConfigChange() throws Exception {
@@ -447,14 +443,16 @@ public class DefaultTopologyServiceTest {
// "Deploy" the referenced provider configs first
boolean isDeployed =
ts.deployProviderConfiguration(provConfOne,
- FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/provider-config-one.xml").toURI())));
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
+ "org/apache/knox/gateway/topology/file/provider-config-one.xml").toURI())));
assertTrue(isDeployed);
File provConfOneFile = new File(sharedProvidersDir, provConfOne);
assertTrue(provConfOneFile.exists());
isDeployed =
ts.deployProviderConfiguration(provConfTwo,
- FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml").toURI())));
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
+ "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml").toURI())));
assertTrue(isDeployed);
File provConfTwoFile = new File(sharedProvidersDir, provConfTwo);
assertTrue(provConfTwoFile.exists());
@@ -469,7 +467,8 @@ public class DefaultTopologyServiceTest {
// "Deploy" the simple descriptor, which depends on provConfOne
isDeployed =
ts.deployDescriptor(simpleDescName,
- FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json").toURI())));
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
+ "org/apache/knox/gateway/topology/file/simple-descriptor-six.json").toURI())));
assertTrue(isDeployed);
File simpleDesc = new File(descriptorsDir, simpleDescName);
assertTrue(simpleDesc.exists());
@@ -490,7 +489,8 @@ public class DefaultTopologyServiceTest {
// Overwrite the simple descriptor with content that changes the provider config reference to provConfTwo
isDeployed =
ts.deployDescriptor(simpleDescName,
- FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json").toURI())));
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource(
+ "org/apache/knox/gateway/topology/file/simple-descriptor-five.json").toURI())));
assertTrue(isDeployed);
assertTrue(simpleDesc.exists());
ts.getProviderConfigurations();
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index a0c977a..f40fad7 100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -302,7 +302,7 @@ public class SimpleDescriptorHandlerTest {
* a service.
*
* N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
- * org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
+ * org.apache.knox.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
*/
@Test
public void testInvalidServiceURLFromDiscovery() throws Exception {
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
deleted file mode 100644
index 95465a4..0000000
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
+++ /dev/null
@@ -1,74 +0,0 @@
-<gateway>
- <provider>
- <role>authentication</role>
- <name>ShiroProvider</name>
- <enabled>false</enabled>
- <param>
- <!--
- session timeout in minutes, this is really idle timeout,
- defaults to 30mins, if the property value is not defined,,
- current client authentication would expire if client idles contiuosly for more than this value
- -->
- <name>sessionTimeout</name>
- <value>30</value>
- </param>
- <param>
- <name>main.ldapRealm</name>
- <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
- </param>
- <param>
- <name>main.ldapContextFactory</name>
- <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
- </param>
- <param>
- <name>main.ldapRealm.contextFactory</name>
- <value>$ldapContextFactory</value>
- </param>
- <param>
- <name>main.ldapRealm.userDnTemplate</name>
- <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
- </param>
- <param>
- <name>main.ldapRealm.contextFactory.url</name>
- <value>ldap://localhost:33389</value>
- </param>
- <param>
- <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
- <value>simple</value>
- </param>
- <param>
- <name>urls./**</name>
- <value>authcBasic</value>
- </param>
- </provider>
-
- <provider>
- <role>identity-assertion</role>
- <name>Default</name>
- <enabled>true</enabled>
- </provider>
-
- <!--
- Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
- For example, a hadoop service running in AWS may return a response that includes URLs containing the
- some AWS internal host name. If the client needs to make a subsequent request to the host identified
- in those URLs they need to be mapped to external host names that the client Knox can use to connect.
-
- If the external hostname and internal host names are same turn of this provider by setting the value of
- enabled parameter as false.
-
- The name parameter specifies the external host names in a comma separated list.
- The value parameter specifies corresponding internal host names in a comma separated list.
-
- Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
- of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
- Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
- -->
- <provider>
- <role>hostmap</role>
- <name>static</name>
- <enabled>true</enabled>
- <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
- </provider>
-
-</gateway>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
deleted file mode 100644
index 52cec35..0000000
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "discovery-type":"DUMMY",
- "discovery-address":"http://c6401.ambari.apache.org:8080",
- "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
- "cluster":"dummy",
- "services":[
- {"name":"NAMENODE"},
- {"name":"JOBTRACKER"},
- {"name":"WEBHDFS"},
- {"name":"OOZIE"},
- {"name":"HIVE"},
- {"name":"RESOURCEMANAGER"}
- ]
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
deleted file mode 100644
index e78f193..0000000
--- a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "discovery-type":"DUMMY",
- "discovery-address":"http://c6401.ambari.apache.org:8080",
- "provider-config-ref":"../shared-providers/provider-config-one.xml",
- "cluster":"dummy",
- "services":[
- {"name":"NAMENODE"},
- {"name":"JOBTRACKER"},
- {"name":"WEBHDFS"},
- {"name":"WEBHCAT"},
- {"name":"OOZIE"},
- {"name":"WEBHBASE"},
- {"name":"HIVE"},
- {"name":"RESOURCEMANAGER"},
- {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
- {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
- ]
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
index 8223bea..32ae6e1 100644
--- a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml
@@ -14,11 +14,11 @@
</param>
<param>
<name>main.ldapRealm</name>
- <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>
</param>
<param>
<name>main.ldapContextFactory</name>
- <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+ <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>
</param>
<param>
<name>main.ldapRealm.contextFactory</name>
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/provider-config-one.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/provider-config-one.xml b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/provider-config-one.xml
new file mode 100644
index 0000000..049d5cb
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/provider-config-one.xml
@@ -0,0 +1,74 @@
+<gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>false</enabled>
+ <param>
+ <!--
+ session timeout in minutes, this is really idle timeout,
+ defaults to 30mins, if the property value is not defined,,
+ current client authentication would expire if client idles contiuosly for more than this value
+ -->
+ <name>sessionTimeout</name>
+ <value>30</value>
+ </param>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapContextFactory</name>
+ <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory</name>
+ <value>$ldapContextFactory</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>ldap://localhost:33389</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+
+ <!--
+ Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+ For example, a hadoop service running in AWS may return a response that includes URLs containing the
+ some AWS internal host name. If the client needs to make a subsequent request to the host identified
+ in those URLs they need to be mapped to external host names that the client Knox can use to connect.
+
+ If the external hostname and internal host names are same turn of this provider by setting the value of
+ enabled parameter as false.
+
+ The name parameter specifies the external host names in a comma separated list.
+ The value parameter specifies corresponding internal host names in a comma separated list.
+
+ Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out
+ of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
+ Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
+ -->
+ <provider>
+ <role>hostmap</role>
+ <name>static</name>
+ <enabled>true</enabled>
+ <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+ </provider>
+
+</gateway>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-five.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-five.json b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-five.json
new file mode 100644
index 0000000..52cec35
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-five.json
@@ -0,0 +1,14 @@
+{
+ "discovery-type":"DUMMY",
+ "discovery-address":"http://c6401.ambari.apache.org:8080",
+ "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+ "cluster":"dummy",
+ "services":[
+ {"name":"NAMENODE"},
+ {"name":"JOBTRACKER"},
+ {"name":"WEBHDFS"},
+ {"name":"OOZIE"},
+ {"name":"HIVE"},
+ {"name":"RESOURCEMANAGER"}
+ ]
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-six.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-six.json b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-six.json
new file mode 100644
index 0000000..e78f193
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/knox/gateway/topology/file/simple-descriptor-six.json
@@ -0,0 +1,18 @@
+{
+ "discovery-type":"DUMMY",
+ "discovery-address":"http://c6401.ambari.apache.org:8080",
+ "provider-config-ref":"../shared-providers/provider-config-one.xml",
+ "cluster":"dummy",
+ "services":[
+ {"name":"NAMENODE"},
+ {"name":"JOBTRACKER"},
+ {"name":"WEBHDFS"},
+ {"name":"WEBHCAT"},
+ {"name":"OOZIE"},
+ {"name":"WEBHBASE"},
+ {"name":"HIVE"},
+ {"name":"RESOURCEMANAGER"},
+ {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+ {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+ ]
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
deleted file mode 100644
index c251213..0000000
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.service.admin;
-
-import org.eclipse.persistence.jaxb.JAXBContextProperties;
-
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.MultivaluedMap;
-import javax.ws.rs.ext.MessageBodyWriter;
-import javax.ws.rs.ext.Provider;
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Marshaller;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.lang.annotation.Annotation;
-import java.lang.reflect.Type;
-import java.util.HashMap;
-import java.util.Map;
-
-@Provider
-@Produces({MediaType.APPLICATION_JSON})
-public class HrefListingMarshaller implements MessageBodyWriter<TopologiesResource.HrefListing> {
-
- @Override
- public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
- return (TopologiesResource.HrefListing.class == type);
- }
-
- @Override
- public long getSize(TopologiesResource.HrefListing instance,
- Class<?> type,
- Type genericType,
- Annotation[] annotations,
- MediaType mediaType) {
- return -1;
- }
-
- @Override
- public void writeTo(TopologiesResource.HrefListing instance,
- Class<?> type,
- Type genericType,
- Annotation[] annotations,
- MediaType mediaType,
- MultivaluedMap<String, Object> httpHeaders,
- OutputStream entityStream) throws IOException, WebApplicationException {
- try {
- Map<String, Object> properties = new HashMap<>(1);
- properties.put( JAXBContextProperties.MEDIA_TYPE, mediaType.toString());
- JAXBContext context = JAXBContext.newInstance(new Class[]{TopologiesResource.HrefListing.class}, properties);
- Marshaller m = context.createMarshaller();
- m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
- m.marshal(instance, entityStream);
- } catch (JAXBException e) {
- throw new IOException(e);
- }
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/HrefListingMarshaller.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/HrefListingMarshaller.java b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/HrefListingMarshaller.java
new file mode 100644
index 0000000..3313601
--- /dev/null
+++ b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/HrefListingMarshaller.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.service.admin;
+
+import org.eclipse.persistence.jaxb.JAXBContextProperties;
+
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+
+@Provider
+@Produces({MediaType.APPLICATION_JSON})
+public class HrefListingMarshaller implements MessageBodyWriter<TopologiesResource.HrefListing> {
+
+ @Override
+ public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
+ return (TopologiesResource.HrefListing.class == type);
+ }
+
+ @Override
+ public long getSize(TopologiesResource.HrefListing instance,
+ Class<?> type,
+ Type genericType,
+ Annotation[] annotations,
+ MediaType mediaType) {
+ return -1;
+ }
+
+ @Override
+ public void writeTo(TopologiesResource.HrefListing instance,
+ Class<?> type,
+ Type genericType,
+ Annotation[] annotations,
+ MediaType mediaType,
+ MultivaluedMap<String, Object> httpHeaders,
+ OutputStream entityStream) throws IOException, WebApplicationException {
+ try {
+ Map<String, Object> properties = new HashMap<>(1);
+ properties.put( JAXBContextProperties.MEDIA_TYPE, mediaType.toString());
+ JAXBContext context = JAXBContext.newInstance(new Class[]{TopologiesResource.HrefListing.class}, properties);
+ Marshaller m = context.createMarshaller();
+ m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+ m.marshal(instance, entityStream);
+ } catch (JAXBException e) {
+ throw new IOException(e);
+ }
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
index 948447b..a0035fc 100644
--- a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
+++ b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
@@ -461,7 +461,7 @@ public class TopologiesResource {
return buildHref(t.getName(), req);
}
- private SimpleTopology getSimpleTopology(org.apache.hadoop.gateway.topology.Topology t, GatewayConfig config) {
+ private SimpleTopology getSimpleTopology(org.apache.knox.gateway.topology.Topology t, GatewayConfig config) {
String uri = buildURI(t, config, request);
String href = buildHref(t, request);
return new SimpleTopology(t, uri, href);
http://git-wip-us.apache.org/repos/asf/knox/blob/46109ad8/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
index ab4ab2b..c6135ae 100644
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
@@ -87,6 +87,6 @@
<!-- No need to rewrite Slider View -->
</routes>
- <dispatch classname="org.apache.hadoop.gateway.dispatch.PassAllHeadersNoEncodingDispatch"/>
+ <dispatch classname="org.apache.knox.gateway.dispatch.PassAllHeadersNoEncodingDispatch"/>
</service>
[16/25] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
index d33d59e,0000000..7dcb4e0
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
@@@ -1,800 -1,0 +1,1386 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.URI;
+import java.net.URISyntaxException;
++import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
++import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import javax.ws.rs.core.MediaType;
+
+import io.restassured.http.ContentType;
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.knox.gateway.services.DefaultGatewayServices;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.topology.TopologyService;
+import org.apache.knox.gateway.topology.Param;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Service;
+import org.apache.knox.gateway.topology.Topology;
+import org.apache.knox.gateway.util.XmlUtils;
++import io.restassured.response.ResponseBody;
++import org.apache.commons.io.FileUtils;
++import org.apache.commons.io.FilenameUtils;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.http.HttpStatus;
+import org.apache.log4j.Appender;
+import org.hamcrest.MatcherAssert;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.xml.sax.InputSource;
+
+import static io.restassured.RestAssured.given;
++import static junit.framework.TestCase.assertTrue;
+import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
+import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.not;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.xml.HasXPath.hasXPath;
++import static org.junit.Assert.assertEquals;
++import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+public class GatewayAdminTopologyFuncTest {
+
+ private static Logger LOG = LoggerFactory.getLogger( GatewayAdminTopologyFuncTest.class );
+
+ public static Enumeration<Appender> appenders;
+ public static GatewayConfig config;
+ public static GatewayServer gateway;
+ public static String gatewayUrl;
+ public static String clusterUrl;
+ private static GatewayTestDriver driver = new GatewayTestDriver();
+
+ @BeforeClass
+ public static void setupSuite() throws Exception {
+ //appenders = NoOpAppender.setUp();
+ driver.setupLdap(0);
+ setupGateway(new GatewayTestConfig());
+ }
+
+ @AfterClass
+ public static void cleanupSuite() throws Exception {
+ gateway.stop();
+ driver.cleanup();
+ //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+ //NoOpAppender.tearDown( appenders );
+ }
+
+ public static void setupGateway(GatewayTestConfig testConfig) throws Exception {
+
+ File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+ File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+ gatewayDir.mkdirs();
+
+ config = testConfig;
+ testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+ File topoDir = new File( testConfig.getGatewayTopologyDir() );
+ topoDir.mkdirs();
+
+ File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+ deployDir.mkdirs();
+
++ File providerConfigDir = new File(testConfig.getGatewayConfDir(), "shared-providers");
++ providerConfigDir.mkdirs();
++
++ File descriptorsDir = new File(testConfig.getGatewayConfDir(), "descriptors");
++ descriptorsDir.mkdirs();
++
+ File descriptor = new File( topoDir, "admin.xml" );
+ FileOutputStream stream = new FileOutputStream( descriptor );
+ createKnoxTopology().toStream( stream );
+ stream.close();
+
+ File descriptor2 = new File( topoDir, "test-cluster.xml" );
+ FileOutputStream stream2 = new FileOutputStream( descriptor2 );
+ createNormalTopology().toStream( stream2 );
+ stream.close();
+
+ DefaultGatewayServices srvcs = new DefaultGatewayServices();
+ Map<String,String> options = new HashMap<>();
+ options.put( "persist-master", "false" );
+ options.put( "master", "password" );
+
+ try {
+ srvcs.init( testConfig, options );
+ } catch ( ServiceLifecycleException e ) {
+ e.printStackTrace(); // I18N not required.
+ }
+ gateway = GatewayServer.startGateway( testConfig, srvcs );
+ MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+ LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+ gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+ clusterUrl = gatewayUrl + "/admin";
+ }
+
+ private static XMLTag createNormalTopology() {
+ XMLTag xml = XMLDoc.newDocument( true )
+ .addRoot( "topology" )
+ .addTag( "gateway" )
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "webappsec" )
+ .addTag( "name" ).addText( "WebAppSec" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "param" )
+ .addTag( "name" ).addText( "csrf.enabled" )
+ .addTag( "value" ).addText( "true" ).gotoParent().gotoParent()
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "authentication" )
+ .addTag( "name" ).addText( "ShiroProvider" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm" )
+ .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+ .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+ .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+ .addTag( "value" ).addText( "simple" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "urls./**" )
+ .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "identity-assertion" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "name" ).addText( "Default" ).gotoParent()
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "authorization" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "name" ).addText( "AclsAuthz" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "webhdfs-acl" )
+ .addTag( "value" ).addText( "hdfs;*;*" ).gotoParent()
+ .gotoRoot()
+ .addTag( "service" )
+ .addTag( "role" ).addText( "WEBHDFS" )
+ .addTag( "url" ).addText( "http://localhost:50070/webhdfs/v1" ).gotoParent()
+ .gotoRoot();
+// System.out.println( "GATEWAY=" + xml.toString() );
+ return xml;
+ }
+
+ private static XMLTag createKnoxTopology() {
+ XMLTag xml = XMLDoc.newDocument( true )
+ .addRoot( "topology" )
+ .addTag( "gateway" )
+ .addTag( "provider" )
+ .addTag( "role" ).addText( "authentication" )
+ .addTag( "name" ).addText( "ShiroProvider" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm" )
+ .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+ .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+ .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+ .addTag( "value" ).addText( "simple" ).gotoParent()
+ .addTag( "param" )
+ .addTag( "name" ).addText( "urls./**" )
+ .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag( "role" ).addText( "authorization" )
+ .addTag( "name" ).addText( "AclsAuthz" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag("param")
+ .addTag("name").addText("knox.acl")
+ .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
+ .addTag("provider")
+ .addTag( "role" ).addText( "identity-assertion" )
+ .addTag( "enabled" ).addText( "true" )
+ .addTag( "name" ).addText( "Default" ).gotoParent()
+ .gotoRoot()
+ .addTag( "service" )
+ .addTag( "role" ).addText( "KNOX" )
+ .gotoRoot();
+ // System.out.println( "GATEWAY=" + xml.toString() );
+ return xml;
+ }
+
++ private static XMLTag createProviderConfiguration() {
++ XMLTag xml = XMLDoc.newDocument( true )
++ .addRoot( "gateway" )
++ .addTag( "provider" )
++ .addTag( "role" ).addText( "authentication" )
++ .addTag( "name" ).addText( "ShiroProvider" )
++ .addTag( "enabled" ).addText( "true" )
++ .addTag( "param" )
++ .addTag( "name" ).addText( "main.ldapRealm" )
++ .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
++ .addTag( "param" )
++ .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
++ .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
++ .addTag( "param" )
++ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
++ .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
++ .addTag( "param" )
++ .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
++ .addTag( "value" ).addText( "simple" ).gotoParent()
++ .addTag( "param" )
++ .addTag( "name" ).addText( "urls./**" )
++ .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
++ .addTag("provider")
++ .addTag( "role" ).addText( "authorization" )
++ .addTag( "name" ).addText( "AclsAuthz" )
++ .addTag( "enabled" ).addText( "true" )
++ .addTag("param")
++ .addTag("name").addText("knox.acl")
++ .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
++ .addTag("provider")
++ .addTag( "role" ).addText( "identity-assertion" )
++ .addTag( "enabled" ).addText( "true" )
++ .addTag( "name" ).addText( "Default" ).gotoParent()
++ .gotoRoot();
++ // System.out.println( "GATEWAY=" + xml.toString() );
++ return xml;
++ }
++
++
++ private static String createDescriptor(String clusterName) {
++ return createDescriptor(clusterName, null);
++ }
++
++
++ private static String createDescriptor(String clusterName, String providerConfigRef) {
++ StringBuilder sb = new StringBuilder();
++ if (providerConfigRef == null) {
++ providerConfigRef = "sandbox-providers";
++ }
++
++ sb.append("{\n");
++ sb.append(" \"discovery-type\":\"AMBARI\",\n");
++ sb.append(" \"discovery-address\":\"http://c6401.ambari.apache.org:8080\",\n");
++ sb.append(" \"discovery-user\":\"ambariuser\",\n");
++ sb.append(" \"discovery-pwd-alias\":\"ambari.discovery.password\",\n");
++ sb.append(" \"provider-config-ref\":\"");
++ sb.append(providerConfigRef);
++ sb.append("\",\n");
++ sb.append(" \"cluster\":\"");
++ sb.append(clusterName);
++ sb.append("\",\n");
++ sb.append(" \"services\":[\n");
++ sb.append(" {\"name\":\"NAMENODE\"},\n");
++ sb.append(" {\"name\":\"JOBTRACKER\"},\n");
++ sb.append(" {\"name\":\"WEBHDFS\"},\n");
++ sb.append(" {\"name\":\"WEBHCAT\"},\n");
++ sb.append(" {\"name\":\"OOZIE\"},\n");
++ sb.append(" {\"name\":\"WEBHBASE\"},\n");
++ sb.append(" {\"name\":\"HIVE\"},\n");
++ sb.append(" {\"name\":\"RESOURCEMANAGER\"},\n");
++ sb.append(" {\"name\":\"AMBARI\", \"urls\":[\"http://c6401.ambari.apache.org:8080\"]}\n");
++ sb.append(" ]\n");
++ sb.append("}\n");
++
++ return sb.toString();
++ }
++
++
+ //@Test
+ public void waitForManualTesting() throws IOException {
+ System.in.read();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testTopologyCollection() throws ClassNotFoundException {
+ LOG_ENTER();
+
+ String username = "admin";
+ String password = "admin-password";
+ String serviceUrl = clusterUrl + "/api/v1/topologies";
+ String href1 = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .contentType(MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .body("topologies.topology[0].name", not(nullValue()))
+ .body("topologies.topology[1].name", not(nullValue()))
+ .body("topologies.topology[0].uri", not(nullValue()))
+ .body("topologies.topology[1].uri", not(nullValue()))
+ .body("topologies.topology[0].href", not(nullValue()))
+ .body("topologies.topology[1].href", not(nullValue()))
+ .body("topologies.topology[0].timestamp", not(nullValue()))
+ .body("topologies.topology[1].timestamp", not(nullValue()))
+ .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology.href[1]");
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ //.log().all()
+ .body("topologies.topology.href[1]", equalTo(href1))
+ .statusCode(HttpStatus.SC_OK)
+ .when().get(serviceUrl);
+
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_XML)
+ .when().get(serviceUrl);
+
+
+ given().auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType("application/json")
+ .body("topology.name", equalTo("test-cluster"))
+ .when().get(href1);
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testTopologyObject() throws ClassNotFoundException {
+ LOG_ENTER();
+
+ String username = "admin";
+ String password = "admin-password";
+ String serviceUrl = clusterUrl + "/api/v1/topologies";
+ String hrefJson = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
+
+ String timestampJson = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType("application/json")
+ .when().get(serviceUrl).andReturn()
+ .getBody().path("topologies.topology[1].timestamp");
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .body("topology.name", equalTo("test-cluster"))
+ .body("topology.timestamp", equalTo(Long.parseLong(timestampJson)))
+ .when()
+ .get(hrefJson);
+
+
+ String hrefXml = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .when()
+ .get(hrefXml);
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testPositiveAuthorization() throws ClassNotFoundException{
+ LOG_ENTER();
+
+ String adminUser = "admin";
+ String adminPass = "admin-password";
+ String url = clusterUrl + "/api/v1/topologies";
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(adminUser, adminPass)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(ContentType.JSON)
+ .body("topologies.topology[0].name", not(nullValue()))
+ .body("topologies.topology[1].name", not(nullValue()))
+ .body("topologies.topology[0].uri", not(nullValue()))
+ .body("topologies.topology[1].uri", not(nullValue()))
+ .body("topologies.topology[0].href", not(nullValue()))
+ .body("topologies.topology[1].href", not(nullValue()))
+ .body("topologies.topology[0].timestamp", not(nullValue()))
+ .body("topologies.topology[1].timestamp", not(nullValue()))
+ .when().get(url);
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testNegativeAuthorization() throws ClassNotFoundException{
+ LOG_ENTER();
+
+ String guestUser = "guest";
+ String guestPass = "guest-password";
+ String url = clusterUrl + "/api/v1/topologies";
+
+ given()
+ //.log().all()
+ .auth().basic(guestUser, guestPass)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_FORBIDDEN)
+ .when().get(url);
+
+ LOG_EXIT();
+ }
+
+ private Topology createTestTopology(){
+ Topology topology = new Topology();
+ topology.setName("test-topology");
+
+ try {
+ topology.setUri(new URI(gatewayUrl + "/" + topology.getName()));
+ } catch (URISyntaxException ex) {
+ assertThat(topology.getUri(), not(nullValue()));
+ }
+
+ Provider identityProvider = new Provider();
+ identityProvider.setName("Default");
+ identityProvider.setRole("identity-assertion");
+ identityProvider.setEnabled(true);
+
+ Provider AuthenicationProvider = new Provider();
+ AuthenicationProvider.setName("ShiroProvider");
+ AuthenicationProvider.setRole("authentication");
+ AuthenicationProvider.setEnabled(true);
+
+ Param ldapMain = new Param();
+ ldapMain.setName("main.ldapRealm");
+ ldapMain.setValue("org.apache.knox.gateway.shirorealm.KnoxLdapRealm");
+
+ Param ldapGroupContextFactory = new Param();
+ ldapGroupContextFactory.setName("main.ldapGroupContextFactory");
+ ldapGroupContextFactory.setValue("org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory");
+
+ Param ldapRealmContext = new Param();
+ ldapRealmContext.setName("main.ldapRealm.contextFactory");
+ ldapRealmContext.setValue("$ldapGroupContextFactory");
+
+ Param ldapURL = new Param();
+ ldapURL.setName("main.ldapRealm.contextFactory.url");
+ ldapURL.setValue(driver.getLdapUrl());
+
+ Param ldapUserTemplate = new Param();
+ ldapUserTemplate.setName("main.ldapRealm.userDnTemplate");
+ ldapUserTemplate.setValue("uid={0},ou=people,dc=hadoop,dc=apache,dc=org");
+
+ Param authcBasic = new Param();
+ authcBasic.setName("urls./**");
+ authcBasic.setValue("authcBasic");
+
+ AuthenicationProvider.addParam(ldapGroupContextFactory);
+ AuthenicationProvider.addParam(ldapMain);
+ AuthenicationProvider.addParam(ldapRealmContext);
+ AuthenicationProvider.addParam(ldapURL);
+ AuthenicationProvider.addParam(ldapUserTemplate);
+ AuthenicationProvider.addParam(authcBasic);
+
+ Service testService = new Service();
+ testService.setRole("test-service-role");
+
+ topology.addProvider(AuthenicationProvider);
+ topology.addProvider(identityProvider);
+ topology.addService(testService);
+ topology.setTimestamp(System.nanoTime());
+
+ return topology;
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testDeployTopology() throws Exception {
+ LOG_ENTER();
+
+ Topology testTopology = createTestTopology();
+
+ String user = "guest";
+ String password = "guest-password";
+
+ String url = gatewayUrl + "/" + testTopology.getName() + "/test-service-path/test-service-resource";
+
+ GatewayServices srvs = GatewayServer.getGatewayServices();
+
+ TopologyService ts = srvs.getService(GatewayServices.TOPOLOGY_SERVICE);
+ try {
+ ts.stopMonitor();
+
+ assertThat( testTopology, not( nullValue() ) );
+ assertThat( testTopology.getName(), is( "test-topology" ) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( "admin", "admin-password" ).header( "Accept", MediaType.APPLICATION_JSON ).then()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK ).body( containsString( "ServerVersion" ) ).when().get( gatewayUrl + "/admin/api/v1/version" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).then()
+ //.log().all()
+ .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
+
+ ts.deployTopology( testTopology );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).then()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK ).contentType( "text/plain" ).body( is( "test-service-response" ) ).when().get( url ).getBody();
+
+ ts.deleteTopology( testTopology );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).then()
+ //.log().all()
+ .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
+ } finally {
+ ts.startMonitor();
+ }
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testDeleteTopology() throws ClassNotFoundException {
+ LOG_ENTER();
+
+ Topology test = createTestTopology();
+
+ String username = "admin";
+ String password = "admin-password";
+ String url = clusterUrl + "/api/v1/topologies/" + test.getName();
+
+ GatewayServices gs = GatewayServer.getGatewayServices();
+
+ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ ts.deployTopology(test);
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().get(url);
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_OK)
+ .contentType(MediaType.APPLICATION_JSON)
+ .when().delete(url);
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .then()
+ //.log().all()
+ .statusCode(HttpStatus.SC_NO_CONTENT)
+ .when().get(url);
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testPutTopology() throws Exception {
+ LOG_ENTER() ;
+
+ String username = "admin";
+ String password = "admin-password";
+ String url = clusterUrl + "/api/v1/topologies/test-put";
+
+ String JsonPut =
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .get(clusterUrl + "/api/v1/topologies/test-cluster")
+ .getBody().asString();
+
+ String XML = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .contentType(MediaType.APPLICATION_JSON)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .body(JsonPut)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ //.log().all()
+ .when().put(url).getBody().asString();
+
+ InputSource source = new InputSource( new StringReader( XML ) );
+ Document doc = XmlUtils.readXml( source );
+
+ assertThat( doc, hasXPath( "/topology/gateway/provider[1]/name", containsString( "WebAppSec" ) ) );
+ assertThat( doc, hasXPath( "/topology/gateway/provider[1]/param/name", containsString( "csrf.enabled" ) ) );
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(equalTo(XML))
+ .when().get(url)
+ .getBody().asString();
+
+ String XmlPut =
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .get(clusterUrl + "/api/v1/topologies/test-cluster")
+ .getBody().asString();
+
+ String JSON = given()
+ //.log().all()
+ .auth().preemptive().basic(username, password)
+ .contentType(MediaType.APPLICATION_XML)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .body(XmlPut)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ //.log().all()
+ .when().put(url).getBody().asString();
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_JSON)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(equalTo(JSON))
+ .when().get(url)
+ .getBody().asString();
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testXForwardedHeaders() {
+ LOG_ENTER();
+
+ String username = "admin";
+ String password = "admin-password";
+ String url = clusterUrl + "/api/v1/topologies";
+
+// X-Forward header values
+ String port = String.valueOf(777);
+ String server = "myserver";
+ String host = server + ":" + port;
+ String proto = "protocol";
+ String context = "/mycontext";
+ String newUrl = proto + "://" + host + context;
+// String port = String.valueOf(gateway.getAddresses()[0].getPort());
+
+// Case 1: Add in all x-forward headers (host, port, server, context, proto)
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .header("X-Forwarded-Host", host )
+ .header("X-Forwarded-Port", port )
+ .header("X-Forwarded-Server", server )
+ .header("X-Forwarded-Context", context)
+ .header("X-Forwarded-Proto", proto)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(newUrl))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+
+// Case 2: add in x-forward headers (host, server, proto, context)
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .header("X-Forwarded-Host", host )
+ .header("X-Forwarded-Server", server )
+ .header("X-Forwarded-Context", context )
+ .header("X-Forwarded-Proto", proto )
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(server))
+ .body(containsString(context))
+ .body(containsString(proto))
+ .body(containsString(host))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+// Case 3: add in x-forward headers (host, proto, port, context)
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .header("X-Forwarded-Host", host )
+ .header("X-Forwarded-Port", port )
+ .header("X-Forwarded-Context", context )
+ .header("X-Forwarded-Proto", proto)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(host))
+ .body(containsString(port))
+ .body(containsString(context))
+ .body(containsString(proto))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+// Case 4: add in x-forward headers (host, proto, port, context) no port in host.
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .header("X-Forwarded-Host", server)
+ .header("X-Forwarded-Port", port)
+ .header("X-Forwarded-Context", context)
+ .header("X-Forwarded-Proto", proto)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(server))
+ .body(containsString(port))
+ .body(containsString(context))
+ .body(containsString(proto))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+// Case 5: add in x-forward headers (host, port)
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .header("X-Forwarded-Host", host )
+ .header("X-Forwarded-Port", port )
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(host))
+ .body(containsString(port))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+// Case 6: Normal Request
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(url))
+ .body(containsString("test-cluster"))
+ .body(containsString("admin"))
+ .when().get(url);
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.LONG_TIMEOUT )
+ public void testGatewayPathChange() throws Exception {
+ LOG_ENTER();
+ String username = "admin";
+ String password = "admin-password";
+ String url = clusterUrl + "/api/v1/topologies";
+
+// Case 1: Normal Request (No Change in gateway.path). Ensure HTTP OK resp + valid URL.
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(url + "/test-cluster"))
+ .when().get(url);
+
+
+// Case 2: Change gateway.path to another String. Ensure HTTP OK resp + valid URL.
+ try {
+ gateway.stop();
+
+ GatewayTestConfig conf = new GatewayTestConfig();
+ conf.setGatewayPath("new-gateway-path");
+ setupGateway(conf);
+
+ String newUrl = clusterUrl + "/api/v1/topologies";
+
+ given()
+ .auth().preemptive().basic(username, password)
+ .header("Accept", MediaType.APPLICATION_XML)
+ .then()
+ .statusCode(HttpStatus.SC_OK)
+ .body(containsString(newUrl + "/test-cluster"))
+ .when().get(newUrl);
+ } catch(Exception e){
+ fail(e.getMessage());
+ }
+ finally {
+// Restart the gateway with old settings.
+ gateway.stop();
+ setupGateway(new GatewayTestConfig());
+ }
+
+ LOG_EXIT();
+ }
+
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testProviderConfigurationCollection() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
++
++ final File sharedProvidersDir = new File(config.getGatewayConfDir(), "shared-providers");
++ final List<String> configNames = Arrays.asList("sandbox-providers", "custom-providers");
++ final List<String> configFileNames = Arrays.asList(configNames.get(0) + ".xml", configNames.get(1) + ".xml");
++
++ // Request a listing of all the provider configs with an INCORRECT Accept header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
++ .when().get(serviceUrl);
++
++ // Request a listing of all the provider configs (with the CORRECT Accept header)
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertTrue("Expected no items since the shared-providers dir is empty.", items.isEmpty());
++
++ // Manually write a file to the shared-providers directory
++ File providerConfig = new File(sharedProvidersDir, configFileNames.get(0));
++ FileOutputStream stream = new FileOutputStream(providerConfig);
++ createProviderConfiguration().toStream(stream);
++ stream.close();
++
++ // Request a listing of all the provider configs
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ items = responseBody.path("items");
++ assertEquals("Expected items to include the new file in the shared-providers dir.", 1, items.size());
++ assertEquals(configFileNames.get(0), responseBody.path("items[0].name"));
++ String href1 = responseBody.path("items[0].href");
++
++ // Manually write another file to the shared-providers directory
++ File anotherProviderConfig = new File(sharedProvidersDir, configFileNames.get(1));
++ stream = new FileOutputStream(anotherProviderConfig);
++ createProviderConfiguration().toStream(stream);
++ stream.close();
++
++ // Request a listing of all the provider configs
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ items = responseBody.path("items");
++ assertEquals(2, items.size());
++ String pcOne = responseBody.path("items[0].name");
++ String pcTwo = responseBody.path("items[1].name");
++ assertTrue(configFileNames.contains(pcOne));
++ assertTrue(configFileNames.contains(pcTwo));
++
++ // Request a specific provider configuration with an INCORRECT Accept header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
++ .when().get(href1).body();
++
++ // Request a specific provider configuration (with the CORRECT Accept header)
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_XML)
++ .when().get(href1).body();
++ String sandboxProvidersConfigContent = responseBody.asString();
++
++ // Parse the result, to make sure it's at least valid XML
++ XmlUtils.readXml(new InputSource(new StringReader(sandboxProvidersConfigContent)));
++
++ providerConfig.delete();
++ anotherProviderConfig.delete();
++
++ // Request a specific provider configuration, which does NOT exist
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_FOUND)
++ .when().get(serviceUrl + "/not-a-real-provider-config");
++
++ LOG_EXIT();
++ }
++
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testPutProviderConfiguration() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
++
++ final String newProviderConfigName = "new-provider-config";
++ final String newProviderConfigFileName = newProviderConfigName + ".xml";
++
++ XMLTag newProviderConfigXML = createProviderConfiguration();
++
++ // Attempt to PUT a provider config with an INCORRECT Content-type header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Content-type", MediaType.APPLICATION_JSON)
++ .body(newProviderConfigXML.toBytes("utf-8"))
++ .then()
++ .statusCode(HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE)
++ .when().put(serviceUrl + "/" + newProviderConfigName);
++
++ // Attempt to PUT a provider config with the CORRECT Content-type header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Content-type", MediaType.APPLICATION_XML)
++ .body(newProviderConfigXML.toBytes("utf-8"))
++ .then()
++ .statusCode(HttpStatus.SC_CREATED)
++ .when().put(serviceUrl + "/" + newProviderConfigName);
++
++ // Verify that the provider configuration was written to the expected location
++ File newProviderConfigFile =
++ new File(new File(config.getGatewayConfDir(), "shared-providers"), newProviderConfigFileName);
++ assertTrue(newProviderConfigFile.exists());
++
++ // Request a listing of all the provider configs to further verify the PUT
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertEquals(1, items.size());
++ assertEquals(newProviderConfigFileName, responseBody.path("items[0].name"));
++ String href = responseBody.path("items[0].href");
++
++ // Get the new provider config content
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_XML)
++ .when().get(href).body();
++ String configContent = responseBody.asString();
++
++ // Parse the result, to make sure it's at least valid XML
++ XmlUtils.readXml(new InputSource(new StringReader(configContent)));
++
++ // Manually delete the provider config
++ newProviderConfigFile.delete();
++
++ LOG_EXIT();
++ }
++
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testDeleteProviderConfiguration() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/providerconfig";
++
++ final File sharedProvidersDir = new File(config.getGatewayConfDir(), "shared-providers");
++
++ // Manually add two provider config files to the shared-providers directory
++ File providerConfigOneFile = new File(sharedProvidersDir, "deleteme-one-config.xml");
++ FileOutputStream stream = new FileOutputStream(providerConfigOneFile);
++ createProviderConfiguration().toStream(stream);
++ stream.close();
++ assertTrue(providerConfigOneFile.exists());
++
++ File providerConfigTwoFile = new File(sharedProvidersDir, "deleteme-two-config.xml");
++ stream = new FileOutputStream(providerConfigTwoFile);
++ createProviderConfiguration().toStream(stream);
++ stream.close();
++ assertTrue(providerConfigTwoFile.exists());
++
++ // Request a listing of all the provider configs
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertEquals(2, items.size());
++ String name1 = responseBody.path("items[0].name");
++ String href1 = responseBody.path("items[0].href");
++ String name2 = responseBody.path("items[1].name");
++ String href2 = responseBody.path("items[1].href");
++
++ // Delete one of the provider configs
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().delete(href1).body();
++ String deletedMsg = responseBody.path("deleted");
++ assertEquals("provider config " + FilenameUtils.getBaseName(name1), deletedMsg);
++ assertFalse((new File(sharedProvidersDir, name1).exists()));
++
++ assertTrue((new File(sharedProvidersDir, name2).exists()));
++ // Delete the other provider config
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().delete(href2).body();
++ deletedMsg = responseBody.path("deleted");
++ assertEquals("provider config " + FilenameUtils.getBaseName(name2), deletedMsg);
++ assertFalse((new File(sharedProvidersDir, name2).exists()));
++
++ // Attempt to delete a provider config that does not exist
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .when().delete(serviceUrl + "/does-not-exist");
++
++ LOG_EXIT();
++ }
++
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testDescriptorCollection() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
++
++ final File descriptorsDir = new File(config.getGatewayConfDir(), "descriptors");
++ final List<String> clusterNames = Arrays.asList("clusterOne", "clusterTwo");
++ final List<String> descriptorNames = Arrays.asList("test-descriptor-one", "test-descriptor-two");
++ final List<String> descriptorFileNames = Arrays.asList(descriptorNames.get(0) + ".json",
++ descriptorNames.get(1) + ".json");
++
++ // Request a listing of all the descriptors with an INCORRECT Accept header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
++ .when().get(serviceUrl);
++
++ // Request a listing of all the descriptors (with the CORRECT Accept header)
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertTrue("Expected no items since the descriptors dir is empty.", items.isEmpty());
++
++ // Manually write a file to the descriptors directory
++ File descriptorOneFile = new File(descriptorsDir, descriptorFileNames.get(0));
++ FileUtils.write(descriptorOneFile, createDescriptor(clusterNames.get(0)));
++
++ // Request a listing of all the descriptors
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ items = responseBody.path("items");
++ assertEquals("Expected items to include the new file in the shared-providers dir.", 1, items.size());
++ assertEquals(descriptorFileNames.get(0), responseBody.path("items[0].name"));
++ String href1 = responseBody.path("items[0].href");
++
++ // Manually write another file to the descriptors directory
++ File descriptorTwoFile = new File(descriptorsDir, descriptorFileNames.get(1));
++ FileUtils.write(descriptorTwoFile, createDescriptor(clusterNames.get(1)));
++
++ // Request a listing of all the descriptors
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ items = responseBody.path("items");
++ assertEquals(2, items.size());
++ String descOne = responseBody.path("items[0].name");
++ String descTwo = responseBody.path("items[1].name");
++ assertTrue(descriptorFileNames.contains(descOne));
++ assertTrue(descriptorFileNames.contains(descTwo));
++
++ // Request a specific descriptor with an INCORRECT Accept header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_XML)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_ACCEPTABLE)
++ .when().get(href1).body();
++
++ // Request a specific descriptor (with the CORRECT Accept header)
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(href1).body();
++ String cluster = responseBody.path("cluster");
++ assertEquals(cluster, clusterNames.get(0));
++
++ // Request a specific descriptor, which does NOT exist
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_NOT_FOUND)
++ .when().get(serviceUrl + "/not-a-real-descriptor").body();
++
++ descriptorOneFile.delete();
++ descriptorTwoFile.delete();
++
++ LOG_EXIT();
++ }
++
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testPutDescriptor() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
++
++ final String clusterName = "test-cluster";
++ final String newDescriptorName = "new-descriptor";
++ final String newDescriptorFileName = newDescriptorName + ".json";
++
++ String newDescriptorJSON = createDescriptor(clusterName);
++
++ // Attempt to PUT a descriptor with an INCORRECT Content-type header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Content-type", MediaType.APPLICATION_XML)
++ .body(newDescriptorJSON.getBytes("utf-8"))
++ .then()
++ .statusCode(HttpStatus.SC_UNSUPPORTED_MEDIA_TYPE)
++ .when().put(serviceUrl + "/" + newDescriptorName);
++
++ // Attempt to PUT a descriptor with the CORRECT Content-type header
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Content-type", MediaType.APPLICATION_JSON)
++ .body(newDescriptorJSON.getBytes("utf-8"))
++ .then()
++ .statusCode(HttpStatus.SC_CREATED)
++ .when().put(serviceUrl + "/" + newDescriptorName);
++
++ // Verify that the descriptor was written to the expected location
++ File newDescriptorFile =
++ new File(new File(config.getGatewayConfDir(), "descriptors"), newDescriptorFileName);
++ assertTrue(newDescriptorFile.exists());
++
++ // Request a listing of all the descriptors to verify the PUT
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertEquals(1, items.size());
++ assertEquals(newDescriptorFileName, responseBody.path("items[0].name"));
++ String href = responseBody.path("items[0].href");
++
++ // Get the new descriptor content
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(href).body();
++ String cluster = responseBody.path("cluster");
++ assertEquals(clusterName, cluster);
++
++ // Manually delete the descriptor
++ newDescriptorFile.delete();
++
++ LOG_EXIT();
++ }
++
++
++ @Test( timeout = TestUtils.LONG_TIMEOUT )
++ public void testDeleteDescriptor() throws Exception {
++ LOG_ENTER();
++
++ final String username = "admin";
++ final String password = "admin-password";
++ final String serviceUrl = clusterUrl + "/api/v1/descriptors";
++
++ final File descriptorsDir = new File(config.getGatewayConfDir(), "descriptors");
++
++ // Manually add two descriptor files to the descriptors directory
++ File descriptorOneFile = new File(descriptorsDir, "deleteme-one.json");
++ FileUtils.writeStringToFile(descriptorOneFile, createDescriptor("clusterOne"));
++ assertTrue(descriptorOneFile.exists());
++
++ File descriptorTwoFile = new File(descriptorsDir, "deleteme-two.json");
++ FileUtils.writeStringToFile(descriptorTwoFile, createDescriptor("clusterTwo"));
++ assertTrue(descriptorTwoFile.exists());
++
++ // Request a listing of all the descriptors
++ ResponseBody responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().get(serviceUrl).body();
++ List<String> items = responseBody.path("items");
++ assertEquals(2, items.size());
++ String name1 = responseBody.path("items[0].name");
++ String href1 = responseBody.path("items[0].href");
++ String name2 = responseBody.path("items[1].name");
++ String href2 = responseBody.path("items[1].href");
++
++ // Delete one of the descriptors
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().delete(href1).body();
++ String deletedMsg = responseBody.path("deleted");
++ assertEquals("descriptor " + FilenameUtils.getBaseName(name1), deletedMsg);
++ assertFalse((new File(descriptorsDir, name1).exists()));
++
++ assertTrue((new File(descriptorsDir, name2).exists()));
++ // Delete the other descriptor
++ responseBody = given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .contentType(MediaType.APPLICATION_JSON)
++ .when().delete(href2).body();
++ deletedMsg = responseBody.path("deleted");
++ assertEquals("descriptor " + FilenameUtils.getBaseName(name2), deletedMsg);
++ assertFalse((new File(descriptorsDir, name2).exists()));
++
++ // Attempt to delete a descriptor that does not exist
++ given()
++ .auth().preemptive().basic(username, password)
++ .header("Accept", MediaType.APPLICATION_JSON)
++ .then()
++ .statusCode(HttpStatus.SC_OK)
++ .when().delete(serviceUrl + "/does-not-exist");
++
++ LOG_EXIT();
++ }
++
++
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/pom.xml
----------------------------------------------------------------------
[07/25] knox git commit: Merge branch 'master' into KNOX-1049
Posted by mo...@apache.org.
Merge branch 'master' into KNOX-1049
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/1ee93707
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/1ee93707
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/1ee93707
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 1ee937071b236909a38dc764db451e65a3225ad6
Parents: f549041 986615f
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 26 10:21:54 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 26 10:21:54 2017 -0400
----------------------------------------------------------------------
gateway-discovery-ambari/pom.xml | 66 ++
.../discovery/ambari/AmbariCluster.java | 115 +++
.../discovery/ambari/AmbariComponent.java | 85 ++
.../ambari/AmbariDynamicServiceURLCreator.java | 151 ++++
.../ambari/AmbariServiceDiscovery.java | 305 +++++++
.../ambari/AmbariServiceDiscoveryMessages.java | 121 +++
.../ambari/AmbariServiceDiscoveryType.java | 35 +
.../ambari/ConditionalValueHandler.java | 24 +
.../discovery/ambari/PropertyEqualsHandler.java | 76 ++
.../ambari/ServiceURLPropertyConfig.java | 324 +++++++
.../discovery/ambari/SimpleValueHandler.java | 32 +
...eway.topology.discovery.ServiceDiscoveryType | 19 +
...iscovery-component-config-mapping.properties | 36 +
.../ambari-service-discovery-url-mappings.xml | 398 +++++++++
.../AmbariDynamicServiceURLCreatorTest.java | 876 +++++++++++++++++++
.../ambari/AmbariServiceDiscoveryTest.java | 858 ++++++++++++++++++
.../ha/provider/impl/DefaultURLManager.java | 9 +-
...entityAsserterHttpServletRequestWrapper.java | 25 +-
.../provider/federation/jwt/JWTMessages.java | 3 +
.../jwt/filter/AbstractJWTFilter.java | 59 +-
.../filter/JWTAccessTokenAssertionFilter.java | 23 +-
.../jwt/filter/JWTAuthCodeAssertionFilter.java | 16 +-
.../jwt/filter/JWTFederationFilter.java | 5 +-
.../jwt/filter/SSOCookieFederationFilter.java | 5 +-
.../federation/AbstractJWTFilterTest.java | 287 +++++-
.../federation/SSOCookieProviderTest.java | 5 +-
gateway-provider-security-picketlink/pom.xml | 76 --
.../gateway/picketlink/PicketlinkMessages.java | 40 -
.../picketlink/deploy/PicketlinkConf.java | 194 ----
...PicketlinkFederationProviderContributor.java | 132 ---
.../filter/CaptureOriginalURLFilter.java | 89 --
.../filter/PicketlinkIdentityAdapter.java | 102 ---
...gateway.deploy.ProviderDeploymentContributor | 19 -
.../gateway/picketlink/PicketlinkTest.java | 30 -
gateway-release/home/conf/descriptors/README | 1 +
.../home/conf/shared-providers/README | 1 +
gateway-release/pom.xml | 8 +-
gateway-server/pom.xml | 5 +
.../apache/hadoop/gateway/GatewayMessages.java | 9 +-
.../gateway/config/impl/GatewayConfigImpl.java | 10 +
.../services/DefaultGatewayServices.java | 3 +-
.../impl/DefaultServiceRegistryService.java | 50 +-
.../security/impl/DefaultAliasService.java | 12 +-
.../services/security/impl/JettySSLService.java | 11 +-
.../impl/DefaultTokenAuthorityService.java | 43 +-
.../topology/impl/DefaultTopologyService.java | 294 ++++++-
.../builder/BeanPropertyTopologyBuilder.java | 2 +-
.../DefaultServiceDiscoveryConfig.java | 48 +
.../discovery/ServiceDiscoveryFactory.java | 81 ++
.../topology/simple/SimpleDescriptor.java | 48 +
.../simple/SimpleDescriptorFactory.java | 71 ++
.../simple/SimpleDescriptorHandler.java | 267 ++++++
.../topology/simple/SimpleDescriptorImpl.java | 123 +++
.../simple/SimpleDescriptorMessages.java | 50 ++
.../websockets/GatewayWebsocketHandler.java | 41 +-
.../gateway/websockets/ProxyInboundClient.java | 107 +++
.../websockets/ProxyWebSocketAdapter.java | 20 +-
.../impl/DefaultTokenAuthorityServiceTest.java | 253 ++++++
.../topology/DefaultTopologyServiceTest.java | 70 +-
.../PropertiesFileServiceDiscoveryTest.java | 90 ++
.../discovery/ServiceDiscoveryFactoryTest.java | 81 ++
.../test/extension/DummyServiceDiscovery.java | 66 ++
.../extension/DummyServiceDiscoveryType.java | 32 +
.../PropertiesFileServiceDiscovery.java | 108 +++
.../PropertiesFileServiceDiscoveryType.java | 35 +
.../extension/SneakyServiceDiscoveryImpl.java | 40 +
.../extension/SneakyServiceDiscoveryType.java | 33 +
.../simple/SimpleDescriptorFactoryTest.java | 422 +++++++++
.../simple/SimpleDescriptorHandlerTest.java | 447 ++++++++++
.../websockets/ProxyInboundClientTest.java | 374 ++++++++
...eway.topology.discovery.ServiceDiscoveryType | 21 +
.../resources/keystores/server-keystore.jks | Bin 0 -> 1387 bytes
.../topology/file/ambari-cluster-policy.xml | 74 ++
.../topology/file/simple-topology-four.json | 18 +
.../services/ambariui/2.2.0/service.xml | 5 +
.../resources/services/atlas/0.8.0/rewrite.xml | 6 +-
gateway-service-knoxsso/pom.xml | 11 +-
.../gateway/service/knoxsso/WebSSOResource.java | 29 +-
.../service/knoxsso/WebSSOResourceTest.java | 411 ++++++++-
.../service/knoxtoken/TokenResource.java | 50 +-
.../knoxtoken/TokenServiceResourceTest.java | 288 +++++-
gateway-shell-release/pom.xml | 4 +
.../apache/hadoop/gateway/shell/job/Sqoop.java | 2 +-
.../hadoop/gateway/config/GatewayConfig.java | 2 +
.../dispatch/AbstractGatewayDispatch.java | 8 +
.../gateway/dispatch/DefaultDispatch.java | 9 +
.../hadoop/gateway/dispatch/Dispatch.java | 6 +
.../gateway/dispatch/GatewayDispatchFilter.java | 8 +
.../security/token/JWTokenAuthority.java | 19 +-
.../services/security/token/impl/JWT.java | 42 +-
.../services/security/token/impl/JWTToken.java | 59 +-
.../topology/discovery/GatewayService.java | 29 +
.../topology/discovery/ServiceDiscovery.java | 76 ++
.../discovery/ServiceDiscoveryConfig.java | 42 +
.../discovery/ServiceDiscoveryType.java | 40 +
.../security/token/impl/JWTTokenTest.java | 67 +-
.../hadoop/gateway/GatewayTestConfig.java | 5 +
.../apache/hadoop/gateway/util/HttpUtils.java | 7 +-
.../hadoop/gateway/util/urltemplate/Parser.java | 10 +-
.../gateway/util/urltemplate/ParserTest.java | 17 +
pom.xml | 51 +-
101 files changed, 8396 insertions(+), 1016 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/1ee93707/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index b33e52c,a30cf13..65278a1
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@@ -86,9 -76,8 +86,9 @@@ public class BeanPropertyTopologyBuilde
public Topology build() {
Topology topology = new Topology();
topology.setName(name);
+ topology.setDefaultServicePath(defaultService);
- for (Provider provider : providers) {
+ for (Provider provider : providers) {
topology.addProvider(provider);
}
[22/25] knox git commit: KNOX-998 - Some more refactoring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryFuncTest.java
index 25ad1c3..c9f262b 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -46,8 +46,8 @@ import org.apache.knox.gateway.topology.Provider;
import org.apache.knox.gateway.topology.Service;
import org.apache.knox.gateway.topology.Topology;
import org.apache.knox.gateway.util.XmlUtils;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.log4j.Appender;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePath;
@@ -58,8 +58,8 @@ import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditLayoutTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditLayoutTest.java b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditLayoutTest.java
index 8ff183e..6400f1b 100644
--- a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditLayoutTest.java
+++ b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditLayoutTest.java
@@ -26,7 +26,7 @@ import org.apache.knox.gateway.audit.api.CorrelationService;
import org.apache.knox.gateway.audit.api.CorrelationServiceFactory;
import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
import org.apache.knox.gateway.audit.log4j.layout.AuditLayout;
-import org.apache.hadoop.test.log.CollectAppender;
+import org.apache.knox.test.log.CollectAppender;
import org.apache.log4j.LogManager;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.spi.LoggingEvent;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditServiceTest.java b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditServiceTest.java
index 7c05a2a..7b08e83 100644
--- a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditServiceTest.java
+++ b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/AuditServiceTest.java
@@ -27,7 +27,7 @@ import org.apache.knox.gateway.audit.api.CorrelationServiceFactory;
import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
import org.apache.knox.gateway.audit.log4j.audit.Log4jAuditService;
import org.apache.knox.gateway.audit.log4j.correlation.Log4jCorrelationService;
-import org.apache.hadoop.test.log.CollectAppender;
+import org.apache.knox.test.log.CollectAppender;
import org.apache.log4j.LogManager;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.spi.LoggingEvent;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/StoreAndForwardAppenderTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/StoreAndForwardAppenderTest.java b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/StoreAndForwardAppenderTest.java
index 808acb7..becad46 100644
--- a/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/StoreAndForwardAppenderTest.java
+++ b/gateway-util-common/src/test/java/org/apache/knox/gateway/audit/StoreAndForwardAppenderTest.java
@@ -17,7 +17,7 @@
*/
package org.apache.knox.gateway.audit;
-import org.apache.hadoop.test.log.CollectAppender;
+import org.apache.knox.test.log.CollectAppender;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-common/src/test/resources/audit-log4j.properties
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/test/resources/audit-log4j.properties b/gateway-util-common/src/test/resources/audit-log4j.properties
index ccc92f5..c4d4fd1 100644
--- a/gateway-util-common/src/test/resources/audit-log4j.properties
+++ b/gateway-util-common/src/test/resources/audit-log4j.properties
@@ -22,4 +22,4 @@ log4j.logger.audit.forward = INFO, audit-forward
log4j.appender.audit-store = org.apache.knox.gateway.audit.log4j.appender.JdbmStoreAndForwardAppender
log4j.appender.audit-store.file = target/audit
-log4j.appender.audit-forward = org.apache.hadoop.test.log.CollectAppender
\ No newline at end of file
+log4j.appender.audit-forward = org.apache.knox.test.log.CollectAppender
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ExpanderTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ExpanderTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ExpanderTest.java
index acf7cf6..60f6bbd 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ExpanderTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ExpanderTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java
index df31d3d..e75c89b 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java
@@ -18,8 +18,8 @@
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java.orig
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java.orig b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java.orig
deleted file mode 100644
index 4e1a9c8..0000000
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/MatcherTest.java.orig
+++ /dev/null
@@ -1,839 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.knox.gateway.util.urltemplate;
-
-
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import java.net.URISyntaxException;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.sameInstance;
-import static org.hamcrest.core.IsCollectionContaining.hasItem;
-import static org.hamcrest.core.IsNull.notNullValue;
-import static org.junit.Assert.assertThat;
-
-//TODO: Test to make sure that extra unmatched query parameters prevent a match.
-@Category( { UnitTests.class, FastTests.class } )
-public class MatcherTest {
-
- private void addTemplate( Matcher<String> matcher, String template ) throws URISyntaxException {
- matcher.add( Parser.parse( template ), template );
- }
-
- private void assertValidMatch( Matcher<String> matcher, String uri, String template ) throws URISyntaxException {
- if( template == null ) {
- assertThat( matcher.match( Parser.parse( uri ) ), nullValue() );
- } else {
- Template uriTemplate = Parser.parse( uri );
- Matcher<String>.Match match = matcher.match( uriTemplate );
- assertThat( "Expected to find a match.", match, notNullValue() );
- assertThat( match.getValue(), equalTo( template ) );
- }
- }
-
- @Test
- public void testWildcardCharacterInInputTemplate() throws URISyntaxException {
- Matcher<String> matcher;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- // First verify that if .../test_table/test_row/family1... works.
- matcher = new Matcher<String>();
- inputTemplate = Parser.parse( "https://localhost:8443/gateway/sandbox/hbase/test_table/test_row/family1:row2_col1,family2/0,9223372036854775807?v=1" );
- patternTemplate = Parser.parse( "*://*:*/**/webhdfs/{version}/{path=**}?{**}" );
- matcher.add( patternTemplate, "webhdfs" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
-
- // Then reproduce the issue with .../test_table/*/family1..
- matcher = new Matcher<String>();
- inputTemplate = Parser.parse( "https://localhost:8443/gateway/sandbox/hbase/test_table/*/family1:row2_col1,family2/0,9223372036854775807?v=1" );
- patternTemplate = Parser.parse( "*://*:*/**/webhdfs/{version}/{path=**}?{**}" );
- matcher.add( patternTemplate, "webhdfs" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
-
- // Reproduce the issue where the wrong match was picked when there was a "*" in the input URL template.
- matcher = new Matcher<String>();
- inputTemplate = Parser.parse( "https://localhost:8443/gateway/sandbox/hbase/test_table/*/family1:row2_col1,family2/0,9223372036854775807?v=1" );
- patternTemplate = Parser.parse( "*://*:*/**/webhdfs/{version}/{path=**}?{**}" );
- matcher.add( patternTemplate, "webhdfs" );
- patternTemplate = Parser.parse( "*://*:*/**/hbase/{path=**}?{**}" );
- matcher.add( patternTemplate, "hbase" );
- match = matcher.match( inputTemplate );
- assertThat( match.getValue(), is( "hbase" ) );
- }
-
- @Test
- public void testDefaultAppDeployment() throws Exception {
- Matcher<String> matcher;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- matcher = new Matcher<String>();
- inputTemplate = Parser.parse( "https://localhost:8443/webhdfs/v1/tmp?op=LISTSTATUS" );
- patternTemplate = Parser.parse( "*://*:*/webhdfs/{version}/{path=**}?{**}" );
- matcher.add( patternTemplate, "webhdfs" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- }
-
- @Test
- public void testRootPathMatching() throws Exception {
- Matcher<String> matcher;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- ///////
- patternTemplate = Parser.parse( "*://*:*" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
- inputTemplate = Parser.parse( "test-scheme://test-host:42" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
-
- ///////
- patternTemplate = Parser.parse( "*://*:*/" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
- inputTemplate = Parser.parse( "test-scheme://test-host:42" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
-
- ///////
- patternTemplate = Parser.parse( "*://*:*/*" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
- inputTemplate = Parser.parse( "test-scheme://test-host:42" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
-
- ///////
- patternTemplate = Parser.parse( "*://*:*/**" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
-//KM: I'm not sure what the correct behavior is here.
-// inputTemplate = Parser.parse( "test-scheme://test-host:42" );
-// match = matcher.match( inputTemplate );
-// assertThat( match, ? );
-// inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
-// match = matcher.match( inputTemplate );
-// assertThat( match, ? );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
-
- ///////
- patternTemplate = Parser.parse( "*://*:*/{path=*}" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
- inputTemplate = Parser.parse( "test-scheme://test-host:42" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
-
- ///////
- patternTemplate = Parser.parse( "*://*:*/{path=**}" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-match" );
-
-//KM: I'm not sure what the correct behavior is here.
-// inputTemplate = Parser.parse( "test-scheme://test-host:42" );
-// match = matcher.match( inputTemplate );
-// assertThat( match, ? );
-// inputTemplate = Parser.parse( "test-scheme://test-host:42/" );
-// match = matcher.match( inputTemplate );
-// assertThat( match, ? );
- inputTemplate = Parser.parse( "test-scheme://test-host:42/test-path" );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
- }
-
- @Test
- public void testTopLevelPathGlobMatch() throws Exception {
- Matcher<String> matcher;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- patternTemplate = Parser.parse( "{*}://{host}:{*}/{**=**}?{**}" );
- inputTemplate = Parser.parse( "test-scheme://test-input-host:42/test-path/test-file?test-name=test-value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-math" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the path ** should include both test-path and test-file", match, notNullValue() );
-
- patternTemplate = Parser.parse( "{*}://{host}:{*}/{**}?{**}" );
- inputTemplate = Parser.parse( "test-scheme://test-input-host:42/test-path/test-file?test-name=test-value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "test-math" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the path ** should include both test-path and test-file", match, notNullValue() );
- }
-
- @Test
- public void testQueryHandling() throws Exception {
- Matcher<String> matcher;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- patternTemplate = Parser.parse( "/path?{query}" );
- inputTemplate = Parser.parse( "/path" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should not match because input does not contain the required query.", match, nullValue() );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/path?{query}" ), "T1" );
- matcher.add( Parser.parse( "/path" ), "T2" );
- inputTemplate = Parser.parse( "/path" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because there is an entry in the matcher without a query.", match, notNullValue() );
- assertThat( match.getValue(), equalTo( "T2") );
-
- patternTemplate = Parser.parse( "/path?{query}" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because input does contain the required query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value" ) );
- assertThat( match.getParams().resolve( "query" ).size(), equalTo( 1 ) );
-
- patternTemplate = Parser.parse( "/path?{*}" );
- inputTemplate = Parser.parse( "/path" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should not match because input does not contain the required query.", match, nullValue() );
-
- patternTemplate = Parser.parse( "/path?*" );
- inputTemplate = Parser.parse( "/path" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should not match because input does not contain the required query.", match, nullValue() );
-
- patternTemplate = Parser.parse( "/path?*" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat(
- "Should match because the template has an extra query and the input has a query.",
- match, notNullValue() );
- assertThat(
- "Should not have extracts any parameters since pattern template didn't contain {}",
- match.getParams().resolve( "query" ), nullValue() );
-
- patternTemplate = Parser.parse( "/path?{*}" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because input does contain the required query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value" ) );
-
- patternTemplate = Parser.parse( "/path?{**}" );
- inputTemplate = Parser.parse( "/path" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the template has an optional query.", match, notNullValue() );
-
- patternTemplate = Parser.parse( "/path?**" );
- inputTemplate = Parser.parse( "/path" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the template has an optional extra query.", match, notNullValue() );
-
- patternTemplate = Parser.parse( "/path?**" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the template has an optional extra query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), nullValue() );
-
- patternTemplate = Parser.parse( "/path?{**}" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because the template has an optional extra query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value" ) );
- assertThat( match.getParams().resolve( "query" ).size(), equalTo( 1 ) );
-
- patternTemplate = Parser.parse( "/path?{query}&{*}" );
- inputTemplate = Parser.parse( "/path?query=value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should not match because input does not contain the required extra query.", match, nullValue() );
-
- patternTemplate = Parser.parse( "/path?{query}&{*}" );
- inputTemplate = Parser.parse( "/path?query=value&extra=extra-value" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because input does contain the required query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value" ) );
- assertThat( match.getParams().resolve( "query" ).size(), equalTo( 1 ) );
-
- patternTemplate = Parser.parse( "/path?{query=**}" );
- inputTemplate = Parser.parse( "/path?query=value1&query=value2" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because input does contain the required query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value1" ) );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value2" ) );
- assertThat( match.getParams().resolve( "query" ).size(), equalTo( 2 ) );
-
- patternTemplate = Parser.parse( "/path?{query}" );
- inputTemplate = Parser.parse( "/path?query=value1&query=value2" );
- matcher = new Matcher<String>();
- matcher.add( patternTemplate, "T" );
- match = matcher.match( inputTemplate );
- assertThat( "Should match because input does contain the required query.", match, notNullValue() );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value1" ) );
- assertThat( match.getParams().resolve( "query" ), hasItem( "value2" ) );
- assertThat( match.getParams().resolve( "query" ).size(), equalTo( 2 ) );
- }
-
- @Test
- public void testMatchCompleteUrl() throws Exception {
- Matcher<String> matcher;
- String pattern, input;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- matcher = new Matcher<String>();
- pattern = "foo://username:password@example.com:8042/over/there/index.dtb?type=animal&name=narwhal#nose";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "foo://username:password@example.com:8042/over/there/index.dtb?type=animal&name=narwhal#nose";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "foo://username:password@example.com:8042/over/there/index.dtb?type=animal&name=narwhal#nose";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
-
- input = pattern;
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match, notNullValue() );
-
- input = "not://username:password@example.com:8042/over/there/index.dtb?type=animal&name=narwhal#nose";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match, nullValue() );
- }
-
- @Test
- public void testMatch() throws Exception {
- Matcher<String> matcher;
- String pattern, input;
- Template patternTemplate, inputTemplate;
- Matcher<String>.Match match;
-
- matcher = new Matcher<String>();
- pattern = "path";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- assertThat( matcher.get( patternTemplate ), is( pattern ) );
- input = "path";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
-
- matcher = new Matcher<String>();
- pattern = "/path";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "/path";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "path/path";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "path/path";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "*/path";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "pathA/path";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "**/path";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "pathA/pathB/path";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "path-1/{path=**}/path-4";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "path-1/path-2/path-3/path-4";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
- assertThat( match.getParams().resolve( "path" ).get( 0 ), equalTo( "path-2" ) );
- assertThat( match.getParams().resolve( "path" ).get( 1 ), equalTo( "path-3" ) );
-
- matcher = new Matcher<String>();
- pattern = "/";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "/";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
-
- matcher = new Matcher<String>();
- pattern = "";
- patternTemplate = Parser.parse( pattern );
- matcher.add( patternTemplate, pattern );
- input = "";
- inputTemplate = Parser.parse( input );
- match = matcher.match( inputTemplate );
- assertThat( match.getTemplate(), sameInstance( patternTemplate ) );
- assertThat( match.getValue(), equalTo( pattern ) );
- }
-
- @Test
- public void testVariousPatterns() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs" ), "/webhdfs" );
- matcher.add( Parser.parse( "/webhdfs/dfshealth.jsp" ), "/webhdfs/dfshealth.jsp" );
- matcher.add( Parser.parse( "/webhdfs/*.jsp" ), "/webhdfs/*.jsp" );
- matcher.add( Parser.parse( "/webhdfs/other.jsp" ), "/webhdfs/other.jsp" );
- matcher.add( Parser.parse( "/webhdfs/*" ), "/webhdfs/*" );
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- matcher.add( Parser.parse( "/webhdfs/v1/**" ), "/webhdfs/v1/**" );
- matcher.add( Parser.parse( "/webhdfs/**/middle/*.xml" ), "/webhdfs/**/middle/*.xml" );
-
- assertValidMatch( matcher, "/webhdfs", "/webhdfs" );
- assertValidMatch( matcher, "/webhdfs/dfshealth.jsp", "/webhdfs/dfshealth.jsp" );
- assertValidMatch( matcher, "/webhdfs/v1", "/webhdfs/*" ); // The star should be picked in preference to the glob.
- assertValidMatch( matcher, "/webhdfs/some.jsp", "/webhdfs/*.jsp" );
- assertValidMatch( matcher, "/webhdfs/other.jsp", "/webhdfs/other.jsp" );
- assertValidMatch( matcher, "/webhdfs/path/some.jsp", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/path/middle/some.jsp", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/path/middle/some.xml", "/webhdfs/**/middle/*.xml" );
- assertValidMatch( matcher, "/webhdfs/path/to/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/v1/path/to/file", "/webhdfs/v1/**" );
- }
-
- @Test
- public void testStar() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/*" ), "/webhdfs/*" );
- assertValidMatch( matcher, "/webhdfs/*", "/webhdfs/*" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/*" );
- assertValidMatch( matcher, "/webhdfs/path/", "/webhdfs/*" );
- assertValidMatch( matcher, "/webhdfs/path/file", null );
- assertValidMatch( matcher, "/webhdfs/path/path/", null );
- }
-
- @Test
- public void testGlob() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/path/", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/path/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/path/path/", "/webhdfs/**" );
- }
-
- @Test
- public void testMatrixParam() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- matcher.add( Parser.parse( "/webhdfs/browseDirectory.jsp;dn=*" ), "/webhdfs/browseDirectory.jsp;dn=*" );
- assertValidMatch( matcher, "/webhdfs/browseDirectory.jsp;dn=X", "/webhdfs/browseDirectory.jsp;dn=*" );
- }
-
- @Test
- public void testTwoGlobsAtDifferentDepths() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- matcher.add( Parser.parse( "/webhdfs/v1/**" ), "/webhdfs/v1/**" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/v1/file", "/webhdfs/v1/**" );
-
- // Reverse the put order.
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/v1/**" ), "/webhdfs/v1/**" );
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/v1/file", "/webhdfs/v1/**" );
- }
-
- @Test
- public void testGlobsVsStarsAtSameDepth() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/*" ), "/webhdfs/*" );
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/*" ); // The star should be picked in preference to the glob.
- assertValidMatch( matcher, "/webhdfs/path/file", "/webhdfs/**" );
-
- // Reverse the put order.
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/webhdfs/**" ), "/webhdfs/**" );
- matcher.add( Parser.parse( "/webhdfs/*" ), "/webhdfs/*" );
- assertValidMatch( matcher, "/webhdfs/path/file", "/webhdfs/**" );
- assertValidMatch( matcher, "/webhdfs/file", "/webhdfs/*" );
- }
-
- @Test
- public void testMatchingPatternsWithinPathSegments() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/path/{file}" ), "default" );
- assertValidMatch( matcher, "/path/file-name", "default" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/path/{file=*}" ), "*" );
- assertValidMatch( matcher, "/path/some-name", "*" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/path/{more=**}" ), "**" );
- assertValidMatch( matcher, "/path/some-path/some-name", "**" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "/path/{regex=prefix*suffix}" ), "regex" );
- assertValidMatch( matcher, "/path/prefix-middle-suffix", "regex" );
- assertValidMatch( matcher, "/path/not-prefix-middle-suffix", null );
- }
-
- @Test
- public void testMatchingPatternsWithinQuerySegments() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- matcher.add( Parser.parse( "?query={queryParam}" ), "default" );
- assertValidMatch( matcher, "?query=value", "default" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "?query={queryParam=*}" ), "*" );
- assertValidMatch( matcher, "?query=some-value", "*" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "?query={queryParam=**}" ), "**" );
- assertValidMatch( matcher, "?query=some-value", "**" );
-
- matcher = new Matcher<String>();
- matcher.add( Parser.parse( "?query={queryParam=prefix*suffix}" ), "regex" );
- assertValidMatch( matcher, "?query=prefix-middle-suffix", "regex" );
- assertValidMatch( matcher, "?query=not-prefix-middle-suffix", null );
- }
-
- @Test
- public void testMatchingForTemplatesThatVaryOnlyByQueryParams() throws URISyntaxException {
- Matcher<String> matcher = new Matcher<String>();
- addTemplate( matcher, "?one={queryParam}" );
- addTemplate( matcher, "?two={queryParam}" );
-
- assertValidMatch( matcher, "?one=value", "?one={queryParam}" );
- assertValidMatch( matcher, "?two=value", "?two={queryParam}" );
- assertValidMatch( matcher, "?three=value", null );
- assertValidMatch( matcher, "?", null );
- }
-
- @Test
- public void testFullUrlExtraction() throws URISyntaxException {
- Template template;
- Template input;
- Matcher<?> matcher;
- Matcher<?>.Match match;
- Params params;
-
- template = Parser.parse( "{scheme}://{username}:{password}@{host}:{port}/{root}/{path}/{file}?queryA={paramA}&queryB={paramB}#{fragment}" );
- input = Parser.parse( "http://horton:hadoop@hortonworks.com:80/top/middle/end?queryA=valueA&queryB=valueB#section" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
-
- assertThat( params.getNames(), hasItem( "scheme" ) );
- assertThat( params.resolve( "scheme" ), hasItem( "http" ) );
- assertThat( params.getNames(), hasItem( "username" ) );
- assertThat( params.resolve( "username" ), hasItem( "horton" ) );
- assertThat( params.getNames(), hasItem( "password" ) );
- assertThat( params.resolve( "password" ), hasItem( "hadoop" ) );
- assertThat( params.getNames(), hasItem( "host" ) );
- assertThat( params.resolve( "host" ), hasItem( "hortonworks.com" ) );
- assertThat( params.getNames(), hasItem( "port" ) );
- assertThat( params.resolve( "port" ), hasItem( "80" ) );
- assertThat( params.getNames(), hasItem( "root" ) );
- assertThat( params.resolve( "root" ), hasItem( "top" ) );
- assertThat( params.getNames(), hasItem( "path" ) );
- assertThat( params.resolve( "path" ), hasItem( "middle" ) );
- assertThat( params.getNames(), hasItem( "file" ) );
- assertThat( params.resolve( "file" ), hasItem( "end" ) );
- assertThat( params.getNames(), hasItem( "paramA" ) );
- assertThat( params.resolve( "paramA" ), hasItem( "valueA" ) );
- assertThat( params.getNames(), hasItem( "paramB" ) );
- assertThat( params.resolve( "paramB" ), hasItem( "valueB" ) );
- assertThat( params.getNames(), hasItem( "fragment" ) );
- assertThat( params.resolve( "fragment" ), hasItem( "section" ) );
- assertThat( params.getNames().size(), equalTo( 11 ) );
- }
-
- @Test
- public void testMultipleDoubleStarPathMatching() throws URISyntaxException {
- Template template;
- Template input;
- Matcher<?> matcher;
- Matcher<String> stringMatcher;
- Matcher<?>.Match match;
-
-// template = Parser.parse( "*://*:*/**/webhdfs/v1/**?**" );
-// input = Parser.parse( "http://localhost:53221/gateway/cluster/webhdfs/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir?user.name=hdfs&op=MKDIRS" );
-// matcher = new Matcher<String>( template, "test-value" );
-// match = matcher.match( input );
-// assertThat( (String)match.getValue(), is( "test-value" ) );
-//
-// template = Parser.parse( "*://*:*/**/webhdfs/v1/{path=**}?{**=*}" );
-// input = Parser.parse( "http://localhost:53221/gateway/cluster/webhdfs/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir?user.name=hdfs&op=MKDIRS" );
-// matcher = new Matcher<String>( template, "test-value-2" );
-// match = matcher.match( input );
-// assertThat( (String)match.getValue(), is( "test-value-2" ) );
-//
-// stringMatcher = new Matcher<String>();
-// template = Parser.parse( "*://*:*/**/webhdfs/data/v1/{path=**}?host={host=*}&port={port=*}&{**=*}" );
-// stringMatcher.add( template, "test-value-C" );
-// template = Parser.parse( "*://*:*/**/webhdfs/v1/{path=**}?{**=*}" );
-// stringMatcher.add( template, "test-value-B" );
-// input = Parser.parse( "http://localhost:53221/gateway/cluster/webhdfs/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir?user.name=hdfs&op=MKDIRS" );
-// match = stringMatcher.match( input );
-// assertThat( match.getValue(), notNullValue() );
-// assertThat( (String)match.getValue(), is( "test-value-B" ) );
-
- // This is just a reverse of the above. The order caused a bug.
- stringMatcher = new Matcher<String>();
- template = Parser.parse( "*://*:*/**/webhdfs/v1/{path=**}?{**=*}" );
- stringMatcher.add( template, "test-value-B" );
- template = Parser.parse( "*://*:*/**/webhdfs/data/v1/{path=**}?host={host=*}&port={port=*}&{**=*}" );
- stringMatcher.add( template, "test-value-C" );
- input = Parser.parse( "http://localhost:53221/gateway/cluster/webhdfs/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir?user.name=hdfs&op=MKDIRS" );
- match = stringMatcher.match( input );
- assertThat( match.getValue(), notNullValue() );
- assertThat( (String)match.getValue(), is( "test-value-B" ) );
-
- }
-
- @Test
- public void testPathExtraction() throws Exception {
- Template template;
- Template input;
- Matcher<?> matcher;
- Matcher<?>.Match match;
- Params params;
-
- template = Parser.parse( "{path-queryParam}" );
- input = Parser.parse( "path-value" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "path-queryParam" ) );
- assertThat( params.resolve( "path-queryParam" ).size(), equalTo( 1 ) );
- assertThat( params.resolve( "path-queryParam" ), hasItem( "path-value" ) );
-
- template = Parser.parse( "/some-path/{path-queryParam}" );
- input = Parser.parse( "/some-path/path-value" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "path-queryParam" ) );
- assertThat( params.resolve( "path-queryParam" ).size(), equalTo( 1 ) );
- assertThat( params.resolve( "path-queryParam" ), hasItem( "path-value" ) );
-
- template = Parser.parse( "/some-path/{path-queryParam}/some-other-path" );
- input = Parser.parse( "/some-path/path-value/some-other-path" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "path-queryParam" ) );
- assertThat( params.resolve( "path-queryParam" ).size(), equalTo( 1 ) );
- assertThat( params.resolve( "path-queryParam" ), hasItem( "path-value" ) );
-
- template = Parser.parse( "{path=**}" );
- input = Parser.parse( "A/B" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "path" ) );
- assertThat( params.resolve( "path" ).size(), equalTo( 2 ) );
- assertThat( params.resolve( "path" ), hasItem( "A" ) );
- assertThat( params.resolve( "path" ), hasItem( "B" ) );
-
- template = Parser.parse( "/top/{mid=**}/end" );
- input = Parser.parse( "/top/A/B/end" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "mid" ) );
- assertThat( params.resolve( "mid" ).size(), equalTo( 2 ) );
- assertThat( params.resolve( "mid" ), hasItem( "A" ) );
- assertThat( params.resolve( "mid" ), hasItem( "B" ) );
-
- template = Parser.parse( "*://*:*/{path=**}?{**}" );
- input = Parser.parse( "http://host:port/pathA/pathB" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params.resolve( "path" ), hasItem( "pathA" ) );
- assertThat( params.resolve( "path" ), hasItem( "pathB" ) );
- assertThat( params.resolve( "path" ).size(), is( 2 ) );
-
- template = Parser.parse( "*://*:*/{path=**}?{**}" );
- input = Parser.parse( "http://host:port/pathA/pathB" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params.resolve( "path" ), hasItem( "pathA" ) );
- assertThat( params.resolve( "path" ), hasItem( "pathB" ) );
- assertThat( params.resolve( "path" ).size(), is( 2 ) );
-
- template = Parser.parse( "*://*:*/{path=**}?{**}" );
- input = Parser.parse( "http://host:port/pathA/pathB" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params.resolve( "path" ), hasItem( "pathA" ) );
- assertThat( params.resolve( "path" ), hasItem( "pathB" ) );
- assertThat( params.resolve( "path" ).size(), is( 2 ) );
- }
-
- @Test
- public void testQueryExtraction() throws Exception {
- Template template;
- Template input;
- Matcher<?> matcher;
- Matcher<?>.Match match;
- Params params;
-
- template = Parser.parse( "?query-queryParam={queryParam-name}" );
- input = Parser.parse( "?query-queryParam=queryParam-value" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "queryParam-name" ) );
- assertThat( params.resolve( "queryParam-name" ).size(), equalTo( 1 ) );
- assertThat( params.resolve( "queryParam-name" ), hasItem( "queryParam-value" ) );
-
- template = Parser.parse( "?query-queryParam={queryParam-name}" );
- input = Parser.parse( "?query-queryParam=queryParam-value" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 1 ) );
- assertThat( params.getNames(), hasItem( "queryParam-name" ) );
- assertThat( params.resolve( "queryParam-name" ).size(), equalTo( 1 ) );
- assertThat( params.resolve( "queryParam-name" ), hasItem( "queryParam-value" ) );
- }
-
- @Test
- public void testEdgeCaseExtraction() throws Exception {
- Template template;
- Template input;
- Matcher<?> matcher;
- Matcher<?>.Match match;
- Params params;
-
- template = Parser.parse( "" );
- input = Parser.parse( "" );
- matcher = new Matcher<Void>( template, null );
- match = matcher.match( input );
- params = match.getParams();
- assertThat( params, notNullValue() );
- assertThat( params.getNames().size(), equalTo( 0 ) );
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
index 70085d4..90410ae 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/RewriterTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/RewriterTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/RewriterTest.java
index 9d65b05..7bc3b85 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/RewriterTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/RewriterTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.easymock.EasyMock;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/SegmentTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/SegmentTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/SegmentTest.java
index 47ad08e..c88aacf 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/SegmentTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/SegmentTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/TemplateTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/TemplateTest.java b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/TemplateTest.java
index d3f1c2a..5b3db90 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/TemplateTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/TemplateTest.java
@@ -17,8 +17,8 @@
*/
package org.apache.knox.gateway.util.urltemplate;
-import org.apache.hadoop.test.category.FastTests;
-import org.apache.hadoop.test.category.UnitTests;
+import org.apache.knox.test.category.FastTests;
+import org.apache.knox.test.category.UnitTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 315f25e..5549d93 100644
--- a/pom.xml
+++ b/pom.xml
@@ -192,13 +192,13 @@
</plugins>
</build>
<properties>
- <failsafe.group>org.apache.hadoop.test.category.VerifyTest</failsafe.group>
+ <failsafe.group>org.apache.knox.test.category.VerifyTest</failsafe.group>
</properties>
</profile>
<profile>
<id>release</id>
<properties>
- <failsafe.group>org.apache.hadoop.test.category.VerifyTest,org.apache.hadoop.test.category.ReleaseTest</failsafe.group>
+ <failsafe.group>org.apache.knox.test.category.VerifyTest,org.apache.knox.test.category.ReleaseTest</failsafe.group>
</properties>
</profile>
<profile>
@@ -207,7 +207,7 @@
<activeByDefault>true</activeByDefault>
</activation>
<properties>
- <failsafe.group>org.apache.hadoop.test.category.VerifyTest</failsafe.group>
+ <failsafe.group>org.apache.knox.test.category.VerifyTest</failsafe.group>
</properties>
</profile>
</profiles>
@@ -308,7 +308,7 @@
<version>${surefire-version}</version>
<configuration>
<excludedGroups>
- org.apache.hadoop.test.category.SlowTests,org.apache.hadoop.test.category.ManualTests,org.apache.hadoop.test.category.VerifyTest,org.apache.hadoop.test.category.ReleaseTest
+ org.apache.knox.test.category.SlowTests,org.apache.knox.test.category.ManualTests,org.apache.knox.test.category.VerifyTest,org.apache.knox.test.category.ReleaseTest
</excludedGroups>
<systemPropertyVariables>
<gateway-version>${gateway-version}</gateway-version>
[13/25] knox git commit: KNOX-1049 - Default Service or App Context
for Topologies
Posted by mo...@apache.org.
KNOX-1049 - Default Service or App Context for Topologies
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/710e7848
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/710e7848
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/710e7848
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 710e784871662a54e0e8994b038a49e735f8be2b
Parents: 62a23fe 485520d
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sun Oct 29 15:47:55 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sun Oct 29 15:47:55 2017 -0400
----------------------------------------------------------------------
.../apache/hadoop/gateway/GatewayFilter.java | 65 +++++++++++-
.../builder/BeanPropertyTopologyBuilder.java | 11 ++
.../xml/KnoxFormatXmlTopologyRules.java | 2 +
.../src/main/resources/conf/topology-v1.xsd | 1 +
.../hadoop/gateway/GatewayFilterTest.java | 49 +++++++++
.../service/admin/TopologiesResource.java | 11 ++
.../service/admin/beans/BeanConverter.java | 2 +
.../gateway/service/admin/beans/Topology.java | 11 ++
.../services/ambariui/2.2.1/rewrite.xml | 104 +++++++++++++++++++
.../services/ambariui/2.2.1/service.xml | 92 ++++++++++++++++
.../hadoop/gateway/topology/Topology.java | 9 ++
.../gateway/topology/topology_binding-xml.xml | 5 +-
12 files changed, 359 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/710e7848/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
----------------------------------------------------------------------
[05/25] knox git commit: KNOX-1088 - Remove LDAP BaseDirectoryService*
Posted by mo...@apache.org.
KNOX-1088 - Remove LDAP BaseDirectoryService*
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/41952dd3
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/41952dd3
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/41952dd3
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 41952dd3344e2c83e35109bc778623017bc0ab73
Parents: 994ac32
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Oct 25 15:42:21 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Oct 26 13:02:34 2017 +0100
----------------------------------------------------------------------
gateway-demo-ldap/pom.xml | 36 +-
.../security/ldap/BaseDirectoryService.java | 2323 ------------------
.../ldap/BaseDirectoryServiceFactory.java | 290 ---
.../security/ldap/SimpleDirectoryService.java | 6 +-
.../ldap/SimpleDirectoryServiceFactory.java | 34 -
.../ldap/SimpleLdapDirectoryServer.java | 38 +-
.../security/ldap/SimpleLdapServerTest.java | 2 -
gateway-test-release/pom.xml | 11 -
gateway-test/pom.xml | 14 -
9 files changed, 43 insertions(+), 2711 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/pom.xml b/gateway-demo-ldap/pom.xml
index a1ddba5..55ab2b8 100644
--- a/gateway-demo-ldap/pom.xml
+++ b/gateway-demo-ldap/pom.xml
@@ -107,26 +107,6 @@
<dependencies>
- <!--
- <dependency>
- <groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-server-integ</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-core-entry</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.directory.shared</groupId>
- <artifactId>shared-ldap</artifactId>
- </dependency>
- -->
- <!--
- <dependency>
- <groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-jdbm</artifactId>
- </dependency>
- -->
<dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-all</artifactId>
@@ -148,20 +128,6 @@
<artifactId>slf4j-log4j12</artifactId>
</dependency>
- <!--
- <dependency>
- <groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-all</artifactId>
- <version>1.5.5</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.directory.shared</groupId>
- <artifactId>shared-ldap-schema</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- -->
-
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -176,4 +142,4 @@
</dependencies>
-</project>
\ No newline at end of file
+</project>
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryService.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryService.java b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryService.java
deleted file mode 100644
index b519f4b..0000000
--- a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryService.java
+++ /dev/null
@@ -1,2323 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- */
-package org.apache.hadoop.gateway.security.ldap;
-
-
-import org.apache.directory.api.ldap.codec.api.LdapApiService;
-import org.apache.directory.api.ldap.codec.api.LdapApiServiceFactory;
-import org.apache.directory.api.ldap.model.constants.AuthenticationLevel;
-import org.apache.directory.api.ldap.model.constants.SchemaConstants;
-import org.apache.directory.api.ldap.model.csn.Csn;
-import org.apache.directory.api.ldap.model.csn.CsnFactory;
-import org.apache.directory.api.ldap.model.cursor.Cursor;
-import org.apache.directory.api.ldap.model.entry.Attribute;
-import org.apache.directory.api.ldap.model.entry.DefaultEntry;
-import org.apache.directory.api.ldap.model.entry.Entry;
-import org.apache.directory.api.ldap.model.entry.Modification;
-import org.apache.directory.api.ldap.model.entry.Value;
-import org.apache.directory.api.ldap.model.exception.LdapException;
-import org.apache.directory.api.ldap.model.exception.LdapNoPermissionException;
-import org.apache.directory.api.ldap.model.exception.LdapOperationException;
-import org.apache.directory.api.ldap.model.ldif.ChangeType;
-import org.apache.directory.api.ldap.model.ldif.LdifEntry;
-import org.apache.directory.api.ldap.model.ldif.LdifReader;
-import org.apache.directory.api.ldap.model.name.Dn;
-import org.apache.directory.api.ldap.model.name.DnUtils;
-import org.apache.directory.api.ldap.model.name.Rdn;
-import org.apache.directory.api.ldap.model.schema.SchemaManager;
-import org.apache.directory.api.ldap.util.tree.DnNode;
-import org.apache.directory.api.util.DateUtils;
-import org.apache.directory.api.util.Strings;
-import org.apache.directory.api.util.exception.NotImplementedException;
-import org.apache.directory.server.constants.ServerDNConstants;
-import org.apache.directory.server.core.DefaultOperationManager;
-import org.apache.directory.server.core.admin.AdministrativePointInterceptor;
-import org.apache.directory.server.core.api.*;
-import org.apache.directory.server.core.api.administrative.AccessControlAdministrativePoint;
-import org.apache.directory.server.core.api.administrative.CollectiveAttributeAdministrativePoint;
-import org.apache.directory.server.core.api.administrative.SubschemaAdministrativePoint;
-import org.apache.directory.server.core.api.administrative.TriggerExecutionAdministrativePoint;
-import org.apache.directory.server.core.api.changelog.ChangeLog;
-import org.apache.directory.server.core.api.changelog.ChangeLogEvent;
-import org.apache.directory.server.core.api.changelog.Tag;
-import org.apache.directory.server.core.api.changelog.TaggableSearchableChangeLogStore;
-import org.apache.directory.server.core.api.event.EventService;
-import org.apache.directory.server.core.api.interceptor.BaseInterceptor;
-import org.apache.directory.server.core.api.interceptor.Interceptor;
-import org.apache.directory.server.core.api.interceptor.context.AddOperationContext;
-import org.apache.directory.server.core.api.interceptor.context.BindOperationContext;
-import org.apache.directory.server.core.api.interceptor.context.HasEntryOperationContext;
-import org.apache.directory.server.core.api.interceptor.context.LookupOperationContext;
-import org.apache.directory.server.core.api.interceptor.context.OperationContext;
-import org.apache.directory.server.core.api.journal.Journal;
-import org.apache.directory.server.core.api.partition.Partition;
-import org.apache.directory.server.core.api.partition.PartitionNexus;
-import org.apache.directory.server.core.api.schema.SchemaPartition;
-import org.apache.directory.server.core.api.subtree.SubentryCache;
-import org.apache.directory.server.core.api.subtree.SubtreeEvaluator;
-import org.apache.directory.server.core.authn.AuthenticationInterceptor;
-import org.apache.directory.server.core.authn.ppolicy.PpolicyConfigContainer;
-import org.apache.directory.server.core.authz.AciAuthorizationInterceptor;
-import org.apache.directory.server.core.authz.DefaultAuthorizationInterceptor;
-import org.apache.directory.server.core.changelog.ChangeLogInterceptor;
-import org.apache.directory.server.core.changelog.DefaultChangeLog;
-import org.apache.directory.server.core.collective.CollectiveAttributeInterceptor;
-import org.apache.directory.server.core.event.EventInterceptor;
-import org.apache.directory.server.core.exception.ExceptionInterceptor;
-import org.apache.directory.server.core.journal.DefaultJournal;
-import org.apache.directory.server.core.journal.JournalInterceptor;
-import org.apache.directory.server.core.normalization.NormalizationInterceptor;
-import org.apache.directory.server.core.operational.OperationalAttributeInterceptor;
-import org.apache.directory.server.core.referral.ReferralInterceptor;
-import org.apache.directory.server.core.schema.SchemaInterceptor;
-import org.apache.directory.server.core.security.TlsKeyGenerator;
-import org.apache.directory.server.core.shared.DefaultCoreSession;
-import org.apache.directory.server.core.shared.DefaultDnFactory;
-import org.apache.directory.server.core.shared.partition.DefaultPartitionNexus;
-import org.apache.directory.server.core.subtree.SubentryInterceptor;
-import org.apache.directory.server.core.trigger.TriggerInterceptor;
-import org.apache.directory.server.i18n.I18n;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.io.StringReader;
-import java.lang.reflect.Method;
-import java.nio.channels.FileLock;
-import java.nio.channels.OverlappingFileLockException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-
-/**
- * Base implementation of {@link DirectoryService}.
- * This is a copy of org.apache.directory.server.core.DefaultDirectoryService
- * created to make showSecurityWarnings protected. This can be removed
- * when http://svn.apache.org/r1546144 in ApacheDS 2.0.0-M16 is available.
- *
- * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
- */
-public class BaseDirectoryService implements DirectoryService
-{
- /** The logger */
- private static final Logger LOG = LoggerFactory.getLogger( BaseDirectoryService.class );
-
- private SchemaPartition schemaPartition;
-
- /** A reference on the SchemaManager */
- private SchemaManager schemaManager;
-
- /** The LDAP Codec Service */
- private LdapApiService ldapCodecService = LdapApiServiceFactory.getSingleton();
-
- /** the root nexus */
- private DefaultPartitionNexus partitionNexus;
-
- /** whether or not server is started for the first time */
- private boolean firstStart;
-
- /** whether or not this instance has been shutdown */
- private boolean started;
-
- /** the change log service */
- private ChangeLog changeLog;
-
- /** the journal service */
- private Journal journal;
-
- /**
- * the interface used to perform various operations on this
- * DirectoryService
- */
- private OperationManager operationManager = new DefaultOperationManager( this );
-
- /** the distinguished name of the administrative user */
- private Dn adminDn;
-
- /** session used as admin for internal operations */
- private CoreSession adminSession;
-
- /** The referral manager */
- private ReferralManager referralManager;
-
- /** A flag to tell if the userPassword attribute's value must be hidden */
- private boolean passwordHidden = false;
-
- /** The service's CSN factory */
- private CsnFactory csnFactory;
-
- /** The directory instance replication ID */
- private int replicaId;
-
- /** remove me after implementation is completed */
- private static final String PARTIAL_IMPL_WARNING =
- "WARNING: the changelog is only partially operational and will revert\n" +
- "state without consideration of who made the original change. All reverting " +
- "changes are made by the admin user.\n Furthermore the used controls are not at " +
- "all taken into account";
-
- /** The delay to wait between each sync on disk */
- private long syncPeriodMillis;
-
- /** The default delay to wait between sync on disk : 15 seconds */
- private static final long DEFAULT_SYNC_PERIOD = 15000;
-
- /** */
- private Thread workerThread;
-
- /** The default timeLimit : 100 entries */
- public static final int MAX_SIZE_LIMIT_DEFAULT = 100;
-
- /** The default timeLimit : 10 seconds */
- public static final int MAX_TIME_LIMIT_DEFAULT = 10000;
-
- /** The instance Id */
- private String instanceId;
-
- /** The server directory layout*/
- private InstanceLayout instanceLayout;
-
- /**
- * A flag used to shutdown the VM when stopping the server. Useful
- * when the server is standalone. If the server is embedded, we don't
- * want to shutdown the VM
- */
- private boolean exitVmOnShutdown = true; // allow by default
-
- /** A flag used to indicate that a shutdown hook has been installed */
- private boolean shutdownHookEnabled = true; // allow by default
-
- /** Manage anonymous access to entries other than the RootDSE */
- private boolean allowAnonymousAccess = false; // forbid by default
-
- /** Manage the basic access control checks */
- private boolean accessControlEnabled; // off by default
-
- /** Manage the operational attributes denormalization */
- private boolean denormalizeOpAttrsEnabled; // off by default
-
- /** The list of declared interceptors */
- private List<Interceptor> interceptors;
- private Map<String, Interceptor> interceptorNames;
-
- /** A lock to protect the interceptors List */
- private ReadWriteLock interceptorsLock = new ReentrantReadWriteLock();
-
- /** The read and write locks */
- private Lock readLock = interceptorsLock.readLock();
- private Lock writeLock = interceptorsLock.writeLock();
-
- /** A map associating a list of interceptor to each operation */
- private Map<OperationEnum, List<String>> operationInterceptors;
-
- /** The System partition */
- private Partition systemPartition;
-
- /** The set of all declared partitions */
- private Set<Partition> partitions = new HashSet<>();
-
- /** A list of LDIF entries to inject at startup */
- private List<? extends LdifEntry> testEntries = new ArrayList<LdifEntry>(); // List<Attributes>
-
- /** The event service */
- private EventService eventService;
-
- /** The maximum size for an incoming PDU */
- private int maxPDUSize = Integer.MAX_VALUE;
-
- /** the value of last successful add/update operation's CSN */
- private String contextCsn;
-
- /** lock file for directory service's working directory */
- private RandomAccessFile lockFile = null;
-
- private static final String LOCK_FILE_NAME = ".dirservice.lock";
-
- /** the ehcache based cache service */
- private CacheService cacheService;
-
- /** The AccessControl AdministrativePoint cache */
- private DnNode<AccessControlAdministrativePoint> accessControlAPCache;
-
- /** The CollectiveAttribute AdministrativePoint cache */
- private DnNode<CollectiveAttributeAdministrativePoint> collectiveAttributeAPCache;
-
- /** The Subschema AdministrativePoint cache */
- private DnNode<SubschemaAdministrativePoint> subschemaAPCache;
-
- /** The TriggerExecution AdministrativePoint cache */
- private DnNode<TriggerExecutionAdministrativePoint> triggerExecutionAPCache;
-
- /** The Dn factory */
- private DnFactory dnFactory;
-
- /** The Subentry cache */
- SubentryCache subentryCache = new SubentryCache();
-
- /** The Subtree evaluator instance */
- private SubtreeEvaluator evaluator;
-
-
- // ------------------------------------------------------------------------
- // Constructor
- // ------------------------------------------------------------------------
-
- /**
- * Creates a new instance of the directory service.
- */
- public BaseDirectoryService() throws Exception
- {
- changeLog = new DefaultChangeLog();
- journal = new DefaultJournal();
- syncPeriodMillis = DEFAULT_SYNC_PERIOD;
- csnFactory = new CsnFactory( replicaId );
- evaluator = new SubtreeEvaluator( schemaManager );
- setDefaultInterceptorConfigurations();
- }
-
-
- // ------------------------------------------------------------------------
- // C O N F I G U R A T I O N M E T H O D S
- // ------------------------------------------------------------------------
-
- public void setInstanceId( String instanceId )
- {
- this.instanceId = instanceId;
- }
-
-
- public String getInstanceId()
- {
- return instanceId;
- }
-
-
- /**
- * Gets the {@link Partition}s used by this DirectoryService.
- *
- * @return the set of partitions used
- */
- public Set<? extends Partition> getPartitions()
- {
- Set<Partition> cloned = new HashSet<>();
- cloned.addAll( partitions );
- return cloned;
- }
-
-
- /**
- * Sets {@link Partition}s used by this DirectoryService.
- *
- * @param partitions the partitions to used
- */
- public void setPartitions( Set<? extends Partition> partitions )
- {
- Set<Partition> cloned = new HashSet<>();
- cloned.addAll( partitions );
- Set<String> names = new HashSet<>();
-
- for ( Partition partition : cloned )
- {
- String id = partition.getId();
-
- if ( names.contains( id ) )
- {
- LOG.warn( "Encountered duplicate partition {} identifier.", id );
- }
-
- names.add( id );
- }
-
- this.partitions = cloned;
- }
-
-
- /**
- * Returns <tt>true</tt> if access control checks are enabled.
- *
- * @return true if access control checks are enabled, false otherwise
- */
- public boolean isAccessControlEnabled()
- {
- return accessControlEnabled;
- }
-
-
- /**
- * Sets whether to enable basic access control checks or not.
- *
- * @param accessControlEnabled true to enable access control checks, false otherwise
- */
- public void setAccessControlEnabled( boolean accessControlEnabled )
- {
- this.accessControlEnabled = accessControlEnabled;
- }
-
-
- /**
- * Returns <tt>true</tt> if anonymous access is allowed on entries besides the RootDSE.
- * If the access control subsystem is enabled then access to some entries may not be
- * allowed even when full anonymous access is enabled.
- *
- * @return true if anonymous access is allowed on entries besides the RootDSE, false
- * if anonymous access is allowed to all entries.
- */
- public boolean isAllowAnonymousAccess()
- {
- return allowAnonymousAccess;
- }
-
-
- /**
- * Sets whether to allow anonymous access to entries other than the RootDSE. If the
- * access control subsystem is enabled then access to some entries may not be allowed
- * even when full anonymous access is enabled.
- *
- * @param enableAnonymousAccess true to enable anonymous access, false to disable it
- */
- public void setAllowAnonymousAccess( boolean enableAnonymousAccess )
- {
- this.allowAnonymousAccess = enableAnonymousAccess;
- }
-
-
- /**
- * Returns interceptors in the server.
- *
- * @return the interceptors in the server.
- */
- public List<Interceptor> getInterceptors()
- {
- List<Interceptor> cloned = new ArrayList<Interceptor>();
-
- readLock.lock();
-
- try
- {
- cloned.addAll( interceptors );
-
- return cloned;
- }
- finally
- {
- readLock.unlock();
- }
- }
-
-
- /**
- * Returns interceptors in the server for a given operation.
- *
- * @return the interceptors in the server for the given operation.
- */
- public List<String> getInterceptors( OperationEnum operation )
- {
- List<String> cloned = new ArrayList<String>();
-
- readLock.lock();
-
- try
- {
- cloned.addAll( operationInterceptors.get( operation ) );
-
- return cloned;
- }
- finally
- {
- readLock.unlock();
- }
-
- }
-
-
- /**
- * Compute the list of to call for each operation
- */
- private void initOperationsList()
- {
- writeLock.lock();
-
- try
- {
- operationInterceptors = new ConcurrentHashMap<OperationEnum, List<String>>();
-
- for ( OperationEnum operation : OperationEnum.getOperations() )
- {
- List<String> operationList = new ArrayList<String>();
-
- for ( Interceptor interceptor : interceptors )
- {
- gatherInterceptors( interceptor, interceptor.getClass(), operation, operationList );
- }
-
- operationInterceptors.put( operation, operationList );
- }
- }
- finally
- {
- writeLock.unlock();
- }
- }
-
-
- /**
- * Recursively checks if the given interceptor can be added to the list of interceptors for a given
- * operation and adds to the list of interceptors if it implements the respective operation
- *
- * @param interceptor the instance of the interceptor
- * @param interceptorClz the class of the interceptor
- * @param operation type of operation
- * @param selectedInterceptorList the list of selected interceptors
- */
- private void gatherInterceptors( Interceptor interceptor, Class<?> interceptorClz, OperationEnum operation,
- List<String> selectedInterceptorList )
- {
- // We stop recursing when we reach the Base class
- if ( ( interceptorClz == null ) || ( interceptorClz == BaseInterceptor.class ) )
- {
- return;
- }
-
- // We don't call getMethods() because it would get back the default methods
- // from the BaseInterceptor, something we don't want.
- Method[] methods = interceptorClz.getDeclaredMethods();
-
- for ( Method method : methods )
- {
- Class<?>[] param = method.getParameterTypes();
- boolean hasCorrestSig = false;
-
- // check for the correct signature
- if ( ( param == null ) || ( param.length > 1 ) || ( param.length == 0 ) )
- {
- continue;
- }
-
- if ( OperationContext.class.isAssignableFrom( param[0] ) )
- {
- hasCorrestSig = true;
- }
- else
- {
- continue;
- }
-
- if ( hasCorrestSig && method.getName().equals( operation.getMethodName() ) )
- {
- if ( !selectedInterceptorList.contains( interceptor.getName() ) )
- {
- selectedInterceptorList.add( interceptor.getName() );
- }
-
- break;
- }
- }
-
- // Recurse on extended classes, as we have used getDeclaredMethods() instead of getmethods()
- gatherInterceptors( interceptor, interceptorClz.getSuperclass(), operation, selectedInterceptorList );
- }
-
-
- /**
- * Add an interceptor to the list of interceptors to call for each operation
- * @throws LdapException
- */
- private void addInterceptor( Interceptor interceptor, int position ) throws LdapException
- {
- // First, init the interceptor
- interceptor.init( this );
-
- writeLock.lock();
-
- try
- {
- for ( OperationEnum operation : OperationEnum.getOperations() )
- {
- List<String> operationList = operationInterceptors.get( operation );
-
- Method[] methods = interceptor.getClass().getDeclaredMethods();
-
- for ( Method method : methods )
- {
- if ( method.getName().equals( operation.getMethodName() ) )
- {
- if ( position == -1 )
- {
- operationList.add( interceptor.getName() );
- }
- else
- {
- operationList.add( position, interceptor.getName() );
- }
-
- break;
- }
- }
- }
-
- interceptorNames.put( interceptor.getName(), interceptor );
-
- if ( position == -1 )
- {
- interceptors.add( interceptor );
- }
- else
- {
- interceptors.add( position, interceptor );
- }
- }
- finally
- {
- writeLock.unlock();
- }
- }
-
-
- /**
- * Remove an interceptor to the list of interceptors to call for each operation
- */
- private void removeOperationsList( String interceptorName )
- {
- Interceptor interceptor = interceptorNames.get( interceptorName );
-
- writeLock.lock();
-
- try
- {
- for ( OperationEnum operation : OperationEnum.getOperations() )
- {
- List<String> operationList = operationInterceptors.get( operation );
-
- Method[] methods = interceptor.getClass().getDeclaredMethods();
-
- for ( Method method : methods )
- {
- if ( method.getName().equals( operation.getMethodName() ) )
- {
- operationList.remove( interceptor.getName() );
-
- break;
- }
- }
- }
-
- interceptorNames.remove( interceptorName );
- interceptors.remove( interceptor );
- }
- finally
- {
- writeLock.unlock();
- }
- }
-
-
- /**
- * Sets the interceptors in the server.
- *
- * @param interceptors the interceptors to be used in the server.
- */
- public void setInterceptors( List<Interceptor> interceptors )
- {
- Map<String, Interceptor> interceptorNames = new HashMap<>();
-
- // Check if we don't have duplicate names in the interceptors list
- for ( Interceptor interceptor : interceptors )
- {
- if ( interceptorNames.containsKey( interceptor.getName() ) )
- {
- LOG.warn( "Encountered duplicate definitions for {} interceptor", interceptor.getName() );
- continue;
- }
-
- interceptorNames.put( interceptor.getName(), interceptor );
- }
-
- this.interceptors = interceptors;
- this.interceptorNames = interceptorNames;
-
- // Now update the Map that connect each operation with the list of interceptors.
- initOperationsList();
- }
-
-
- /**
- * Initialize the interceptors
- */
- private void initInterceptors() throws LdapException
- {
- for ( Interceptor interceptor : interceptors )
- {
- interceptor.init( this );
- }
- }
-
-
- /**
- * Returns test directory entries({@link LdifEntry}) to be loaded while
- * bootstrapping.
- *
- * @return test entries to load during bootstrapping
- */
- public List<LdifEntry> getTestEntries()
- {
- List<LdifEntry> cloned = new ArrayList<LdifEntry>();
- cloned.addAll( testEntries );
-
- return cloned;
- }
-
-
- /**
- * Sets test directory entries({@link javax.naming.directory.Attributes}) to be loaded while
- * bootstrapping.
- *
- * @param testEntries the test entries to load while bootstrapping
- */
- public void setTestEntries( List<? extends LdifEntry> testEntries )
- {
- //noinspection MismatchedQueryAndUpdateOfCollection
- List<LdifEntry> cloned = new ArrayList<LdifEntry>();
- cloned.addAll( testEntries );
- this.testEntries = cloned;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public InstanceLayout getInstanceLayout()
- {
- return instanceLayout;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setInstanceLayout( InstanceLayout instanceLayout ) throws IOException
- {
- this.instanceLayout = instanceLayout;
-
- // Create the directories if they are missing
- if ( !instanceLayout.getInstanceDirectory().exists() )
- {
- if ( !instanceLayout.getInstanceDirectory().mkdirs() )
- {
- throw new IOException( I18n.err( I18n.ERR_112_COULD_NOT_CREATE_DIRECORY,
- instanceLayout.getInstanceDirectory() ) );
- }
- }
-
- if ( !instanceLayout.getLogDirectory().exists() )
- {
- if ( !instanceLayout.getLogDirectory().mkdirs() )
- {
- throw new IOException( I18n.err( I18n.ERR_112_COULD_NOT_CREATE_DIRECORY,
- instanceLayout.getLogDirectory() ) );
- }
- }
-
- if ( !instanceLayout.getRunDirectory().exists() )
- {
- if ( !instanceLayout.getRunDirectory().mkdirs() )
- {
- throw new IOException( I18n.err( I18n.ERR_112_COULD_NOT_CREATE_DIRECORY,
- instanceLayout.getRunDirectory() ) );
- }
- }
-
- if ( !instanceLayout.getPartitionsDirectory().exists() )
- {
- if ( !instanceLayout.getPartitionsDirectory().mkdirs() )
- {
- throw new IOException( I18n.err( I18n.ERR_112_COULD_NOT_CREATE_DIRECORY,
- instanceLayout.getPartitionsDirectory() ) );
- }
- }
-
- if ( !instanceLayout.getConfDirectory().exists() )
- {
- if ( !instanceLayout.getConfDirectory().mkdirs() )
- {
- throw new IOException( I18n.err( I18n.ERR_112_COULD_NOT_CREATE_DIRECORY,
- instanceLayout.getConfDirectory() ) );
- }
- }
- }
-
-
- public void setShutdownHookEnabled( boolean shutdownHookEnabled )
- {
- this.shutdownHookEnabled = shutdownHookEnabled;
- }
-
-
- public boolean isShutdownHookEnabled()
- {
- return shutdownHookEnabled;
- }
-
-
- public void setExitVmOnShutdown( boolean exitVmOnShutdown )
- {
- this.exitVmOnShutdown = exitVmOnShutdown;
- }
-
-
- public boolean isExitVmOnShutdown()
- {
- return exitVmOnShutdown;
- }
-
-
- public void setSystemPartition( Partition systemPartition )
- {
- this.systemPartition = systemPartition;
- }
-
-
- public Partition getSystemPartition()
- {
- return systemPartition;
- }
-
-
- /**
- * return true if the operational attributes must be normalized when returned
- */
- public boolean isDenormalizeOpAttrsEnabled()
- {
- return denormalizeOpAttrsEnabled;
- }
-
-
- /**
- * Sets whether the operational attributes are denormalized when returned
- * @param denormalizeOpAttrsEnabled The flag value
- */
- public void setDenormalizeOpAttrsEnabled( boolean denormalizeOpAttrsEnabled )
- {
- this.denormalizeOpAttrsEnabled = denormalizeOpAttrsEnabled;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public ChangeLog getChangeLog()
- {
- return changeLog;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public Journal getJournal()
- {
- return journal;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setChangeLog( ChangeLog changeLog )
- {
- this.changeLog = changeLog;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setJournal( Journal journal )
- {
- this.journal = journal;
- }
-
-
- public void addPartition( Partition partition ) throws Exception
- {
- partition.setSchemaManager( schemaManager );
-
- try
- {
- // can be null when called before starting up
- if ( partitionNexus != null )
- {
- partitionNexus.addContextPartition( partition );
- }
- }
- catch ( LdapException le )
- {
- // We've got an exception, we cannot add the partition to the partitions
- throw le;
- }
-
- // Now, add the partition to the set of managed partitions
- partitions.add( partition );
- }
-
-
- public void removePartition( Partition partition ) throws Exception
- {
- // Do the backend cleanup first
- try
- {
- // can be null when called before starting up
- if ( partitionNexus != null )
- {
- partitionNexus.removeContextPartition( partition.getSuffixDn() );
- }
- }
- catch ( LdapException le )
- {
- // Bad ! We can't go any further
- throw le;
- }
-
- // And update the set of managed partitions
- partitions.remove( partition );
- }
-
-
- // ------------------------------------------------------------------------
- // BackendSubsystem Interface Method Implementations
- // ------------------------------------------------------------------------
- /**
- * Define a default list of interceptors that has to be used if no other
- * configuration is defined.
- */
- private void setDefaultInterceptorConfigurations()
- {
- // Set default interceptor chains
- List<Interceptor> list = new ArrayList<Interceptor>();
-
- list.add( new NormalizationInterceptor() );
- list.add( new AuthenticationInterceptor() );
- list.add( new ReferralInterceptor() );
- list.add( new AciAuthorizationInterceptor() );
- list.add( new DefaultAuthorizationInterceptor() );
- list.add( new AdministrativePointInterceptor() );
- list.add( new ExceptionInterceptor() );
- list.add( new SchemaInterceptor() );
- list.add( new OperationalAttributeInterceptor() );
- list.add( new CollectiveAttributeInterceptor() );
- list.add( new SubentryInterceptor() );
- list.add( new EventInterceptor() );
- list.add( new TriggerInterceptor() );
- list.add( new ChangeLogInterceptor() );
- list.add( new JournalInterceptor() );
-
- setInterceptors( list );
- }
-
-
- public CoreSession getAdminSession()
- {
- return adminSession;
- }
-
-
- /**
- * Get back an anonymous session
- */
- public CoreSession getSession()
- {
- return new DefaultCoreSession( new LdapPrincipal( schemaManager ), this );
- }
-
-
- /**
- * Get back a session for a given principal
- */
- public CoreSession getSession( LdapPrincipal principal )
- {
- return new DefaultCoreSession( principal, this );
- }
-
-
- /**
- * Get back a session for the give user and credentials bound with Simple Bind
- */
- public CoreSession getSession( Dn principalDn, byte[] credentials ) throws LdapException
- {
- synchronized ( this )
- {
- if ( !started )
- {
- throw new IllegalStateException( "Service has not started." );
- }
- }
-
- BindOperationContext bindContext = new BindOperationContext( null );
- bindContext.setCredentials( credentials );
- bindContext.setDn( principalDn.apply( schemaManager ) );
- bindContext.setInterceptors( getInterceptors( OperationEnum.BIND ) );
-
- operationManager.bind( bindContext );
-
- return bindContext.getSession();
- }
-
-
- /**
- * Get back a session for a given user bound with SASL Bind
- */
- public CoreSession getSession( Dn principalDn, byte[] credentials, String saslMechanism, String saslAuthId )
- throws Exception
- {
- synchronized ( this )
- {
- if ( !started )
- {
- throw new IllegalStateException( "Service has not started." );
-
- }
- }
-
- BindOperationContext bindContext = new BindOperationContext( null );
- bindContext.setCredentials( credentials );
- bindContext.setDn( principalDn.apply( schemaManager ) );
- bindContext.setSaslMechanism( saslMechanism );
- bindContext.setInterceptors( getInterceptors( OperationEnum.BIND ) );
-
- operationManager.bind( bindContext );
-
- return bindContext.getSession();
- }
-
-
- public long revert() throws LdapException
- {
- if ( changeLog == null || !changeLog.isEnabled() )
- {
- throw new IllegalStateException( I18n.err( I18n.ERR_310 ) );
- }
-
- Tag latest = changeLog.getLatest();
-
- if ( null != latest )
- {
- if ( latest.getRevision() < changeLog.getCurrentRevision() )
- {
- return revert( latest.getRevision() );
- }
- else
- {
- LOG.info( "Ignoring request to revert without changes since the latest tag." );
- return changeLog.getCurrentRevision();
- }
- }
-
- throw new IllegalStateException( I18n.err( I18n.ERR_311 ) );
- }
-
-
- /**
- * We handle the ModDN/ModRDN operation for the revert here.
- */
- private void moddn( Dn oldDn, Dn newDn, boolean delOldRdn ) throws LdapException
- {
- if ( oldDn.size() == 0 )
- {
- throw new LdapNoPermissionException( I18n.err( I18n.ERR_312 ) );
- }
-
- // calculate parents
- Dn oldBase = oldDn.getParent();
- Dn newBase = newDn.getParent();
-
- // Compute the Rdn for each of the Dn
- Rdn newRdn = newDn.getRdn();
- Rdn oldRdn = oldDn.getRdn();
-
- /*
- * We need to determine if this rename operation corresponds to a simple
- * Rdn name change or a move operation. If the two names are the same
- * except for the Rdn then it is a simple modifyRdn operation. If the
- * names differ in size or have a different baseDN then the operation is
- * a move operation. Furthermore if the Rdn in the move operation
- * changes it is both an Rdn change and a move operation.
- */
- if ( ( oldDn.size() == newDn.size() ) && oldBase.equals( newBase ) )
- {
- adminSession.rename( oldDn, newRdn, delOldRdn );
- }
- else
- {
- Dn target = newDn.getParent();
-
- if ( newRdn.equals( oldRdn ) )
- {
- adminSession.move( oldDn, target );
- }
- else
- {
- adminSession.moveAndRename( oldDn, target, new Rdn( newRdn ), delOldRdn );
- }
- }
- }
-
-
- public long revert( long revision ) throws LdapException
- {
- if ( changeLog == null || !changeLog.isEnabled() )
- {
- throw new IllegalStateException( I18n.err( I18n.ERR_310 ) );
- }
-
- if ( revision < 0 )
- {
- throw new IllegalArgumentException( I18n.err( I18n.ERR_239 ) );
- }
-
- if ( revision >= changeLog.getChangeLogStore().getCurrentRevision() )
- {
- throw new IllegalArgumentException( I18n.err( I18n.ERR_314 ) );
- }
-
- Cursor<ChangeLogEvent> cursor = changeLog.getChangeLogStore().findAfter( revision );
-
- /*
- * BAD, BAD, BAD!!!
- *
- * No synchronization no nothing. Just getting this to work for now
- * so we can revert tests. Any production grade use of this feature
- * needs to synchronize on all changes while the revert is in progress.
- *
- * How about making this operation transactional?
- *
- * First of all just stop using JNDI and construct the operations to
- * feed into the interceptor pipeline.
- *
- * TODO review this code.
- */
-
- try
- {
- LOG.warn( PARTIAL_IMPL_WARNING );
- cursor.afterLast();
-
- while ( cursor.previous() ) // apply ldifs in reverse order
- {
- ChangeLogEvent event = cursor.get();
- List<LdifEntry> reverses = event.getReverseLdifs();
-
- for ( LdifEntry reverse : reverses )
- {
- switch ( reverse.getChangeType().getChangeType() )
- {
- case ChangeType.ADD_ORDINAL:
- adminSession.add(
- new DefaultEntry( schemaManager, reverse.getEntry() ), true );
- break;
-
- case ChangeType.DELETE_ORDINAL:
- adminSession.delete( reverse.getDn(), true );
- break;
-
- case ChangeType.MODIFY_ORDINAL:
- List<Modification> mods = reverse.getModifications();
-
- adminSession.modify( reverse.getDn(), mods, true );
- break;
-
- case ChangeType.MODDN_ORDINAL:
- // NO BREAK - both ModDN and ModRDN handling is the same
-
- case ChangeType.MODRDN_ORDINAL:
- Dn forwardDn = event.getForwardLdif().getDn();
- Dn reverseDn = reverse.getDn();
-
- moddn( reverseDn, forwardDn, reverse.isDeleteOldRdn() );
-
- break;
-
- default:
- LOG.error( I18n.err( I18n.ERR_75 ) );
- throw new NotImplementedException( I18n.err( I18n.ERR_76, reverse.getChangeType() ) );
- }
- }
- }
- }
- catch ( Exception e )
- {
- throw new LdapOperationException( e.getMessage(), e );
- }
- finally
- {
- try
- {
- cursor.close();
- }
- catch ( Exception e )
- {
- throw new LdapOperationException( e.getMessage(), e );
- }
- }
-
- return changeLog.getCurrentRevision();
- }
-
-
- public OperationManager getOperationManager()
- {
- return operationManager;
- }
-
-
- /**
- * @throws Exception if the LDAP server cannot be started
- */
- public synchronized void startup() throws Exception
- {
- if ( started )
- {
- return;
- }
-
- lockWorkDir();
-
- if ( shutdownHookEnabled )
- {
- Runtime.getRuntime().addShutdownHook( new Thread( new Runnable()
- {
- public void run()
- {
- try
- {
- shutdown();
- }
- catch ( Exception e )
- {
- LOG.warn( "Failed to shut down the directory service: "
- + BaseDirectoryService.this.instanceId, e );
- }
- }
- }, "ApacheDS Shutdown Hook (" + instanceId + ')' ) );
-
- LOG.info( "ApacheDS shutdown hook has been registered with the runtime." );
- }
- else if ( LOG.isWarnEnabled() )
- {
- LOG.warn( "ApacheDS shutdown hook has NOT been registered with the runtime."
- + " This default setting for standalone operation has been overriden." );
- }
-
- initialize();
- showSecurityWarnings();
-
- // load the last stored valid CSN value
- LookupOperationContext loc = new LookupOperationContext( getAdminSession(), systemPartition.getSuffixDn(),
- SchemaConstants.ALL_ATTRIBUTES_ARRAY );
-
- Entry entry = systemPartition.lookup( loc );
-
- Attribute cntextCsnAt = entry.get( SchemaConstants.CONTEXT_CSN_AT );
-
- if ( cntextCsnAt != null )
- {
- // this is a multivalued attribute but current syncrepl provider implementation stores only ONE value at ou=system
- contextCsn = cntextCsnAt.getString();
- }
-
- started = true;
-
- if ( !testEntries.isEmpty() )
- {
- createTestEntries();
- }
- }
-
-
- public synchronized void sync() throws Exception
- {
- if ( !started )
- {
- return;
- }
-
- this.changeLog.sync();
- this.partitionNexus.sync();
- }
-
-
- public synchronized void shutdown() throws Exception
- {
- LOG.debug( "+++ DirectoryService Shutdown required" );
-
- if ( !started )
- {
- return;
- }
-
- // --------------------------------------------------------------------
- // Shutdown the sync thread
- // --------------------------------------------------------------------
- LOG.debug( "--- Syncing the nexus " );
- partitionNexus.sync();
-
- // --------------------------------------------------------------------
- // Shutdown the changelog
- // --------------------------------------------------------------------
- LOG.debug( "--- Syncing the changeLog " );
- changeLog.sync();
- changeLog.destroy();
-
- // --------------------------------------------------------------------
- // Shutdown the journal if enabled
- // --------------------------------------------------------------------
- if ( journal.isEnabled() )
- {
- LOG.debug( "--- Destroying the journal " );
- journal.destroy();
- }
-
- // --------------------------------------------------------------------
- // Shutdown the partition
- // --------------------------------------------------------------------
-
- LOG.debug( "--- Destroying the nexus" );
- partitionNexus.destroy();
-
- // Last flush...
- LOG.debug( "--- Flushing everything before quitting" );
- getOperationManager().lockWrite();
- partitionNexus.sync();
- getOperationManager().unlockWrite();
-
- // --------------------------------------------------------------------
- // And shutdown the server
- // --------------------------------------------------------------------
- LOG.debug( "--- Deleting the cache service" );
- cacheService.destroy();
-
- LOG.debug( "---Deleting the DnCache" );
- dnFactory = null;
-
- if ( lockFile != null )
- {
- try
- {
- lockFile.close();
- // no need to delete the lock file
- }
- catch ( IOException e )
- {
- LOG.warn( "couldn't delete the lock file {}", LOCK_FILE_NAME );
- }
- }
-
- LOG.debug( "+++ DirectoryService stopped" );
- started = false;
- }
-
-
- /**
- * @return The referral manager
- */
- public ReferralManager getReferralManager()
- {
- return referralManager;
- }
-
-
- /**
- * Set the referralManager
- * @param referralManager The initialized referralManager
- */
- public void setReferralManager( ReferralManager referralManager )
- {
- this.referralManager = referralManager;
- }
-
-
- /**
- * @return the SchemaManager
- */
- public SchemaManager getSchemaManager()
- {
- return schemaManager;
- }
-
-
- /**
- * Set the SchemaManager instance.
- *
- * @param schemaManager The server schemaManager
- */
- public void setSchemaManager( SchemaManager schemaManager )
- {
- this.schemaManager = schemaManager;
- }
-
-
- public LdapApiService getLdapCodecService()
- {
- return ldapCodecService;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public SchemaPartition getSchemaPartition()
- {
- return schemaPartition;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setSchemaPartition( SchemaPartition schemaPartition )
- {
- this.schemaPartition = schemaPartition;
- }
-
-
- public DefaultPartitionNexus getPartitionNexus()
- {
- return partitionNexus;
- }
-
-
- public boolean isFirstStart()
- {
- return firstStart;
- }
-
-
- public synchronized boolean isStarted()
- {
- return started;
- }
-
-
- public Entry newEntry( Dn dn )
- {
- return new DefaultEntry( schemaManager, dn );
- }
-
-
- /**
- * Returns true if we had to create the bootstrap entries on the first
- * start of the server. Otherwise if all entries exist, meaning none
- * had to be created, then we are not starting for the first time.
- *
- * @return true if the bootstrap entries had to be created, false otherwise
- * @throws Exception if entries cannot be created
- */
- private boolean createBootstrapEntries() throws Exception
- {
- boolean firstStart = false;
-
- // -------------------------------------------------------------------
- // create admin entry
- // -------------------------------------------------------------------
-
- /*
- * If the admin entry is there, then the database was already created
- */
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, adminDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, adminDn );
-
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT,
- SchemaConstants.TOP_OC,
- SchemaConstants.PERSON_OC,
- SchemaConstants.ORGANIZATIONAL_PERSON_OC,
- SchemaConstants.INET_ORG_PERSON_OC );
-
- serverEntry.put( SchemaConstants.UID_AT, PartitionNexus.ADMIN_UID );
- serverEntry.put( SchemaConstants.USER_PASSWORD_AT, PartitionNexus.ADMIN_PASSWORD_BYTES );
- serverEntry.put( SchemaConstants.DISPLAY_NAME_AT, "Directory Superuser" );
- serverEntry.put( SchemaConstants.CN_AT, "system administrator" );
- serverEntry.put( SchemaConstants.SN_AT, "administrator" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.put( SchemaConstants.DISPLAY_NAME_AT, "Directory Superuser" );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- TlsKeyGenerator.addKeyPair( serverEntry );
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system users area
- // -------------------------------------------------------------------
-
- Dn userDn = getDnFactory().create( ServerDNConstants.USERS_SYSTEM_DN );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, userDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, userDn );
-
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT,
- SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
-
- serverEntry.put( SchemaConstants.OU_AT, "users" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system groups area
- // -------------------------------------------------------------------
-
- Dn groupDn = getDnFactory().create( ServerDNConstants.GROUPS_SYSTEM_DN );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, groupDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, groupDn );
-
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT,
- SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
-
- serverEntry.put( SchemaConstants.OU_AT, "groups" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create administrator group
- // -------------------------------------------------------------------
-
- Dn name = getDnFactory().create( ServerDNConstants.ADMINISTRATORS_GROUP_DN );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, name ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, name );
-
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT,
- SchemaConstants.TOP_OC,
- SchemaConstants.GROUP_OF_UNIQUE_NAMES_OC );
-
- serverEntry.put( SchemaConstants.CN_AT, "Administrators" );
- serverEntry.put( SchemaConstants.UNIQUE_MEMBER_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system configuration area
- // -------------------------------------------------------------------
-
- Dn configurationDn = getDnFactory().create( "ou=configuration,ou=system" );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, configurationDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, configurationDn );
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
-
- serverEntry.put( SchemaConstants.OU_AT, "configuration" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system configuration area for partition information
- // -------------------------------------------------------------------
-
- Dn partitionsDn = getDnFactory().create( "ou=partitions,ou=configuration,ou=system" );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, partitionsDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, partitionsDn );
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
- serverEntry.put( SchemaConstants.OU_AT, "partitions" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system configuration area for services
- // -------------------------------------------------------------------
-
- Dn servicesDn = getDnFactory().create( "ou=services,ou=configuration,ou=system" );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, servicesDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, servicesDn );
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
-
- serverEntry.put( SchemaConstants.OU_AT, "services" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system configuration area for interceptors
- // -------------------------------------------------------------------
-
- Dn interceptorsDn = getDnFactory().create( "ou=interceptors,ou=configuration,ou=system" );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, interceptorsDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, interceptorsDn );
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC );
-
- serverEntry.put( SchemaConstants.OU_AT, "interceptors" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- // -------------------------------------------------------------------
- // create system preferences area
- // -------------------------------------------------------------------
-
- Dn sysPrefRootDn = getDnFactory().create( ServerDNConstants.SYSPREFROOT_SYSTEM_DN );
-
- if ( !partitionNexus.hasEntry( new HasEntryOperationContext( adminSession, sysPrefRootDn ) ) )
- {
- firstStart = true;
-
- Entry serverEntry = new DefaultEntry( schemaManager, sysPrefRootDn );
- serverEntry.put( SchemaConstants.OBJECT_CLASS_AT,
- SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC,
- SchemaConstants.EXTENSIBLE_OBJECT_OC );
-
- serverEntry.put( "prefNodeName", "sysPrefRoot" );
- serverEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN_NORMALIZED );
- serverEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- serverEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- serverEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
-
- partitionNexus.add( new AddOperationContext( adminSession, serverEntry ) );
- }
-
- return firstStart;
- }
-
-
- /**
- * Displays security warning messages if any possible security issue is found.
- * @throws Exception if there are failures parsing and accessing internal structures
- */
- protected void showSecurityWarnings() throws Exception
- {
- // Warn if the default password is not changed.
- boolean needToChangeAdminPassword = false;
-
- Dn adminDn = getDnFactory().create( ServerDNConstants.ADMIN_SYSTEM_DN );
-
- Entry adminEntry = partitionNexus.lookup( new LookupOperationContext( adminSession, adminDn ) );
- Value<?> userPassword = adminEntry.get( SchemaConstants.USER_PASSWORD_AT ).get();
- needToChangeAdminPassword = Arrays.equals( PartitionNexus.ADMIN_PASSWORD_BYTES, userPassword.getBytes() );
-
- if ( needToChangeAdminPassword )
- {
- LOG.warn( "You didn't change the admin password of directory service " + "instance '" + instanceId + "'. "
- + "Please update the admin password as soon as possible " + "to prevent a possible security breach." );
- }
- }
-
-
- /**
- * Adds test entries into the core.
- *
- * @todo this may no longer be needed when JNDI is not used for bootstrapping
- *
- * @throws Exception if the creation of test entries fails.
- */
- private void createTestEntries() throws Exception
- {
- for ( LdifEntry testEntry : testEntries )
- {
- try
- {
- LdifEntry ldifEntry = testEntry.clone();
- Entry entry = ldifEntry.getEntry();
- String dn = ldifEntry.getDn().getName();
-
- try
- {
- getAdminSession().add( new DefaultEntry( schemaManager, entry ) );
- }
- catch ( Exception e )
- {
- LOG.warn( dn + " test entry already exists.", e );
- }
- }
- catch ( CloneNotSupportedException cnse )
- {
- LOG.warn( "Cannot clone the entry ", cnse );
- }
- }
- }
-
-
- private void initializeSystemPartition() throws Exception
- {
- Partition system = getSystemPartition();
-
- // Add root context entry for system partition
- Dn systemSuffixDn = getDnFactory().create( ServerDNConstants.SYSTEM_DN );
- CoreSession adminSession = getAdminSession();
-
- if ( !system.hasEntry( new HasEntryOperationContext( adminSession, systemSuffixDn ) ) )
- {
- Entry systemEntry = new DefaultEntry( schemaManager, systemSuffixDn );
-
- // Add the ObjectClasses
- systemEntry.put( SchemaConstants.OBJECT_CLASS_AT, SchemaConstants.TOP_OC,
- SchemaConstants.ORGANIZATIONAL_UNIT_OC, SchemaConstants.EXTENSIBLE_OBJECT_OC );
-
- // Add some operational attributes
- systemEntry.put( SchemaConstants.CREATORS_NAME_AT, ServerDNConstants.ADMIN_SYSTEM_DN );
- systemEntry.put( SchemaConstants.CREATE_TIMESTAMP_AT, DateUtils.getGeneralizedTime() );
- systemEntry.add( SchemaConstants.ENTRY_CSN_AT, getCSN().toString() );
- systemEntry.add( SchemaConstants.ENTRY_UUID_AT, UUID.randomUUID().toString() );
- systemEntry.put( DnUtils.getRdnAttributeType( ServerDNConstants.SYSTEM_DN ), DnUtils
- .getRdnValue( ServerDNConstants.SYSTEM_DN ) );
-
- AddOperationContext addOperationContext = new AddOperationContext( adminSession, systemEntry );
- system.add( addOperationContext );
- }
- }
-
-
- /**
- * Kicks off the initialization of the entire system.
- *
- * @throws Exception if there are problems along the way
- */
- private void initialize() throws Exception
- {
- if ( LOG.isDebugEnabled() )
- {
- LOG.debug( "---> Initializing the DefaultDirectoryService " );
- }
-
- csnFactory.setReplicaId( replicaId );
-
- // If no interceptor list is defined, setup a default list
- if ( interceptors == null )
- {
- setDefaultInterceptorConfigurations();
- }
-
- if ( cacheService == null )
- {
- // Initialize a default cache service
- cacheService = new CacheService();
- }
-
- cacheService.initialize( instanceLayout );
-
- // Initialize the AP caches
- accessControlAPCache = new DnNode<AccessControlAdministrativePoint>();
- collectiveAttributeAPCache = new DnNode<CollectiveAttributeAdministrativePoint>();
- subschemaAPCache = new DnNode<SubschemaAdministrativePoint>();
- triggerExecutionAPCache = new DnNode<TriggerExecutionAdministrativePoint>();
-
- if ( dnFactory == null )
- {
- dnFactory = new DefaultDnFactory( schemaManager, cacheService.getCache( "dnCache" ) );
- }
-
- // triggers partition to load schema fully from schema partition
- schemaPartition.setCacheService( cacheService );
- schemaPartition.initialize();
- partitions.add( schemaPartition );
- systemPartition.setCacheService( cacheService );
- systemPartition.getSuffixDn().apply( schemaManager );
-
- adminDn = getDnFactory().create( ServerDNConstants.ADMIN_SYSTEM_DN );
- adminSession = new DefaultCoreSession( new LdapPrincipal( schemaManager, adminDn, AuthenticationLevel.STRONG ),
- this );
-
- // @TODO - NOTE: Need to find a way to instantiate without dependency on DPN
- partitionNexus = new DefaultPartitionNexus( new DefaultEntry( schemaManager, Dn.ROOT_DSE ) );
- partitionNexus.setDirectoryService( this );
- partitionNexus.initialize();
-
- initializeSystemPartition();
-
- // --------------------------------------------------------------------
- // Create all the bootstrap entries before initializing chain
- // --------------------------------------------------------------------
-
- firstStart = createBootstrapEntries();
-
- // Initialize the interceptors
- initInterceptors();
-
- // --------------------------------------------------------------------
- // Initialize the changeLog if it's enabled
- // --------------------------------------------------------------------
-
- if ( changeLog.isEnabled() )
- {
- changeLog.init( this );
-
- if ( changeLog.isExposed() && changeLog.isTagSearchSupported() )
- {
- String clSuffix = ( ( TaggableSearchableChangeLogStore ) changeLog.getChangeLogStore() ).getPartition()
- .getSuffixDn().getName();
- partitionNexus.getRootDse( null ).add( SchemaConstants.CHANGELOG_CONTEXT_AT, clSuffix );
- }
- }
-
- // --------------------------------------------------------------------
- // Initialize the journal if it's enabled
- // --------------------------------------------------------------------
- if ( journal.isEnabled() )
- {
- journal.init( this );
- }
-
- if ( LOG.isDebugEnabled() )
- {
- LOG.debug( "<--- DefaultDirectoryService initialized" );
- }
- }
-
-
- /**
- * Read an entry (without Dn)
- *
- * @param text The ldif format file
- * @return An entry.
- */
- // This will suppress PMD.EmptyCatchBlock warnings in this method
- @SuppressWarnings("PMD.EmptyCatchBlock")
- private Entry readEntry( String text )
- {
- StringReader strIn = new StringReader( text );
- BufferedReader in = new BufferedReader( strIn );
-
- String line = null;
- Entry entry = new DefaultEntry();
-
- try
- {
- while ( ( line = in.readLine() ) != null )
- {
- if ( line.length() == 0 )
- {
- continue;
- }
-
- String addedLine = line.trim();
-
- if ( Strings.isEmpty( addedLine ) )
- {
- continue;
- }
-
- Attribute attribute = LdifReader.parseAttributeValue( addedLine );
- Attribute oldAttribute = entry.get( attribute.getId() );
-
- if ( oldAttribute != null )
- {
- try
- {
- oldAttribute.add( attribute.get() );
- entry.put( oldAttribute );
- }
- catch ( LdapException ne )
- {
- // Do nothing
- }
- }
- else
- {
- try
- {
- entry.put( attribute );
- }
- catch ( LdapException ne )
- {
- // TODO do nothing ...
- }
- }
- }
- }
- catch ( IOException ioe )
- {
- // Do nothing : we can't reach this point !
- }
-
- return entry;
- }
-
-
- /**
- * Create a new Entry
- *
- * @param ldif The String representing the attributes, as a LDIF file
- * @param dn The Dn for this new entry
- */
- public Entry newEntry( String ldif, String dn )
- {
- try
- {
- Entry entry = readEntry( ldif );
- Dn newDn = getDnFactory().create( dn );
-
- entry.setDn( newDn );
-
- // TODO Let's get rid of this Attributes crap
- Entry serverEntry = new DefaultEntry( schemaManager, entry );
- return serverEntry;
- }
- catch ( Exception e )
- {
- LOG.error( I18n.err( I18n.ERR_78, ldif, dn ) );
- // do nothing
- return null;
- }
- }
-
-
- public EventService getEventService()
- {
- return eventService;
- }
-
-
- public void setEventService( EventService eventService )
- {
- this.eventService = eventService;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public boolean isPasswordHidden()
- {
- return passwordHidden;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setPasswordHidden( boolean passwordHidden )
- {
- this.passwordHidden = passwordHidden;
- }
-
-
- /**
- * @return The maximum allowed size for an incoming PDU
- */
- public int getMaxPDUSize()
- {
- return maxPDUSize;
- }
-
-
- /**
- * Set the maximum allowed size for an incoming PDU
- * @param maxPDUSize A positive number of bytes for the PDU. A negative or
- * null value will be transformed to {@link Integer#MAX_VALUE}
- */
- public void setMaxPDUSize( int maxPDUSize )
- {
- if ( maxPDUSize <= 0 )
- {
- maxPDUSize = Integer.MAX_VALUE;
- }
-
- this.maxPDUSize = maxPDUSize;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public Interceptor getInterceptor( String interceptorName )
- {
- readLock.lock();
-
- try
- {
- return interceptorNames.get( interceptorName );
- }
- finally
- {
- readLock.unlock();
- }
- }
-
-
- /**
- * {@inheritDoc}
- * @throws LdapException
- */
- public void addFirst( Interceptor interceptor ) throws LdapException
- {
- addInterceptor( interceptor, 0 );
- }
-
-
- /**
- * {@inheritDoc}
- * @throws LdapException
- */
- public void addLast( Interceptor interceptor ) throws LdapException
- {
- addInterceptor( interceptor, -1 );
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void addAfter( String interceptorName, Interceptor interceptor )
- {
- writeLock.lock();
-
- try
- {
- int position = 0;
-
- // Find the position
- for ( Interceptor inter : interceptors )
- {
- if ( interceptorName.equals( inter.getName() ) )
- {
- break;
- }
-
- position++;
- }
-
- if ( position == interceptors.size() )
- {
- interceptors.add( interceptor );
- }
- else
- {
- interceptors.add( position, interceptor );
- }
- }
- finally
- {
- writeLock.unlock();
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void remove( String interceptorName )
- {
- removeOperationsList( interceptorName );
- }
-
-
- /**
- * Get a new CSN
- * @return The CSN generated for this directory service
- */
- public Csn getCSN()
- {
- return csnFactory.newInstance();
- }
-
-
- /**
- * @return the replicaId
- */
- public int getReplicaId()
- {
- return replicaId;
- }
-
-
- /**
- * @param replicaId the replicaId to set
- */
- public void setReplicaId( int replicaId )
- {
- if ( ( replicaId < 0 ) || ( replicaId > 999 ) )
- {
- LOG.error( I18n.err( I18n.ERR_79 ) );
- this.replicaId = 0;
- }
- else
- {
- this.replicaId = replicaId;
- }
- }
-
-
- /**
- * {@inheritDoc}
- */
- public long getSyncPeriodMillis()
- {
- return syncPeriodMillis;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setSyncPeriodMillis( long syncPeriodMillis )
- {
- this.syncPeriodMillis = syncPeriodMillis;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public String getContextCsn()
- {
- return contextCsn;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setContextCsn( String lastKnownCsn )
- {
- this.contextCsn = lastKnownCsn;
- }
-
-
- /**
- * checks if the working directory is already in use by some other directory service, if yes
- * then throws a runtime exception else will obtain the lock on the working directory
- */
- private void lockWorkDir()
- {
- FileLock fileLock = null;
-
- try
- {
- lockFile = new RandomAccessFile( new File( instanceLayout.getInstanceDirectory(), LOCK_FILE_NAME ), "rw" );
- try
- {
- fileLock = lockFile.getChannel().tryLock( 0, 1, false );
- }
- catch ( IOException e )
- {
- // shouldn't happen, but log
- LOG.error( "failed to lock the work directory", e );
- }
- catch ( OverlappingFileLockException e ) // thrown if we can't get a lock
- {
- fileLock = null;
- }
- }
- catch ( FileNotFoundException e )
- {
- // shouldn't happen, but log anyway
- LOG.error( "failed to lock the work directory", e );
- }
-
- if ( ( fileLock == null ) || ( !fileLock.isValid() ) )
- {
- String message = "the working directory " + instanceLayout.getRunDirectory()
- + " has been locked by another directory service.";
- LOG.error( message );
- throw new RuntimeException( message );
- }
-
- }
-
-
- /**
- * {@inheritDoc}
- */
- public CacheService getCacheService()
- {
- return cacheService;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DnNode<AccessControlAdministrativePoint> getAccessControlAPCache()
- {
- return accessControlAPCache;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DnNode<CollectiveAttributeAdministrativePoint> getCollectiveAttributeAPCache()
- {
- return collectiveAttributeAPCache;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DnNode<SubschemaAdministrativePoint> getSubschemaAPCache()
- {
- return subschemaAPCache;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DnNode<TriggerExecutionAdministrativePoint> getTriggerExecutionAPCache()
- {
- return triggerExecutionAPCache;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public boolean isPwdPolicyEnabled()
- {
- AuthenticationInterceptor authenticationInterceptor = (AuthenticationInterceptor) getInterceptor( InterceptorEnum.AUTHENTICATION_INTERCEPTOR
- .getName() );
-
- if ( authenticationInterceptor == null )
- {
- return false;
- }
-
- PpolicyConfigContainer pwdPolicyContainer = authenticationInterceptor.getPwdPolicyContainer();
-
- return ( ( pwdPolicyContainer != null )
- && ( ( pwdPolicyContainer.getDefaultPolicy() != null )
- || ( pwdPolicyContainer.hasCustomConfigs() ) ) );
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DnFactory getDnFactory()
- {
- return dnFactory;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setDnFactory( DnFactory dnFactory )
- {
- this.dnFactory = dnFactory;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public SubentryCache getSubentryCache()
- {
- return subentryCache;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public SubtreeEvaluator getEvaluator()
- {
- return evaluator;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public void setCacheService( CacheService cacheService )
- {
- this.cacheService = cacheService;
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryServiceFactory.java b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryServiceFactory.java
deleted file mode 100644
index 2d2bc1f..0000000
--- a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/BaseDirectoryServiceFactory.java
+++ /dev/null
@@ -1,290 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.gateway.security.ldap;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.directory.api.ldap.model.constants.SchemaConstants;
-import org.apache.directory.api.ldap.model.schema.LdapComparator;
-import org.apache.directory.api.ldap.model.schema.SchemaManager;
-import org.apache.directory.api.ldap.model.schema.comparators.NormalizingComparator;
-import org.apache.directory.api.ldap.model.schema.registries.ComparatorRegistry;
-import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
-import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
-import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
-import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
-import org.apache.directory.api.util.exception.Exceptions;
-import org.apache.directory.server.constants.ServerDNConstants;
-import org.apache.directory.server.core.DefaultDirectoryService;
-import org.apache.directory.server.core.api.CacheService;
-import org.apache.directory.server.core.api.DirectoryService;
-import org.apache.directory.server.core.api.InstanceLayout;
-import org.apache.directory.server.core.api.partition.Partition;
-import org.apache.directory.server.core.api.schema.SchemaPartition;
-import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
-import org.apache.directory.server.core.factory.DirectoryServiceFactory;
-import org.apache.directory.server.core.factory.JdbmPartitionFactory;
-import org.apache.directory.server.core.factory.PartitionFactory;
-import org.apache.directory.server.core.partition.ldif.LdifPartition;
-import org.apache.directory.server.i18n.I18n;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-
-
-/**
- * A Default factory for DirectoryService.
- * This is a copy of org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory
- * created to control how the DirectoryService is created. This can be removed
- * when http://svn.apache.org/r1546144 in ApacheDS 2.0.0-M16 is available.
- *
- * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
- */
-public class BaseDirectoryServiceFactory implements DirectoryServiceFactory
-{
- /** A logger for this class */
- private static final Logger LOG = LoggerFactory.getLogger( DefaultDirectoryServiceFactory.class );
-
- /** The directory service. */
- private DirectoryService directoryService;
-
- /** The partition factory. */
- private PartitionFactory partitionFactory;
-
-
- public BaseDirectoryServiceFactory()
- {
- directoryService = createDirectoryService();
- partitionFactory = createPartitionFactory();
- }
-
- protected DirectoryService createDirectoryService() {
- DirectoryService result;
- try
- {
- // Creating the instance here so that
- // we can set some properties like access control, anon access
- // before starting up the service
- result = new DefaultDirectoryService();
-
- // No need to register a shutdown hook during tests because this
- // starts a lot of threads and slows down test execution
- result.setShutdownHookEnabled( false );
- }
- catch ( Exception e )
- {
- throw new RuntimeException( e );
- }
- return result;
- }
-
- protected PartitionFactory createPartitionFactory() {
- PartitionFactory result;
- try
- {
- String typeName = System.getProperty( "apacheds.partition.factory" );
- if ( typeName != null )
- {
- Class<? extends PartitionFactory> type = ( Class<? extends PartitionFactory> ) Class.forName( typeName );
- result = type.newInstance();
- }
- else
- {
- result = new JdbmPartitionFactory();
- }
- }
- catch ( Exception e )
- {
- LOG.error( "Error instantiating custom partition factory", e );
- throw new RuntimeException( e );
- }
- return result;
- }
-
- /**
- * {@inheritDoc}
- */
- public void init( String name ) throws Exception
- {
- if ( ( directoryService != null ) && directoryService.isStarted() )
- {
- return;
- }
-
- build( name );
- }
-
-
- /**
- * Build the working directory
- */
- private void buildInstanceDirectory( String name ) throws IOException
- {
- String instanceDirectory = System.getProperty( "workingDirectory" );
-
- if ( instanceDirectory == null )
- {
- instanceDirectory = System.getProperty( "java.io.tmpdir" ) + "/server-work-" + name;
- }
-
- InstanceLayout instanceLayout = new InstanceLayout( instanceDirectory );
-
- if ( instanceLayout.getInstanceDirectory().exists() )
- {
- try
- {
- FileUtils.deleteDirectory( instanceLayout.getInstanceDirectory() );
- }
- catch ( IOException e )
- {
- LOG.warn( "couldn't delete the instance directory before initializing the DirectoryService", e );
- }
- }
-
- directoryService.setInstanceLayout( instanceLayout );
- }
-
-
- /**
- * Inits the schema and schema partition.
- */
- private void initSchema() throws Exception
- {
- File workingDirectory = directoryService.getInstanceLayout().getPartitionsDirectory();
-
- // Extract the schema on disk (a brand new one) and load the registries
- File schemaRepository = new File( workingDirectory, "schema" );
- SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( workingDirectory );
-
- try
- {
- extractor.extractOrCopy();
- }
- catch ( IOException ioe )
- {
- // The schema has already been extracted, bypass
- }
-
- SchemaLoader loader = new LdifSchemaLoader( schemaRepository );
- SchemaManager schemaManager = new DefaultSchemaManager( loader );
-
- // We have to load the schema now, otherwise we won't be able
- // to initialize the Partitions, as we won't be able to parse
- // and normalize their suffix Dn
- schemaManager.loadAllEnabled();
-
- // Tell all the normalizer comparators that they should not normalize anything
- ComparatorRegistry comparatorRegistry = schemaManager.getComparatorRegistry();
-
- for ( LdapComparator<?> comparator : comparatorRegistry )
- {
- if ( comparator instanceof NormalizingComparator )
- {
- ( ( NormalizingComparator ) comparator ).setOnServer();
- }
- }
-
- directoryService.setSchemaManager( schemaManager );
-
- // Init the LdifPartition
- LdifPartition ldifPartition = new LdifPartition( schemaManager, directoryService.getDnFactory() );
- ldifPartition.setPartitionPath( new File( workingDirectory, "schema" ).toURI() );
- SchemaPartition schemaPartition = new SchemaPartition( schemaManager );
- schemaPartition.setWrappedPartition( ldifPartition );
- directoryService.setSchemaPartition( schemaPartition );
-
- List<Throwable> errors = schemaManager.getErrors();
-
- if ( errors.size() != 0 )
- {
- throw new Exception( I18n.err( I18n.ERR_317, Exceptions.printErrors( errors ) ) );
- }
- }
-
-
- /**
- * Inits the system partition.
- *
- * @throws Exception the exception
- */
- private void initSystemPartition() throws Exception
- {
- // change the working directory to something that is unique
- // on the system and somewhere either under target directory
- // or somewhere in a temp area of the machine.
-
- // Inject the System Partition
- Partition systemPartition = partitionFactory.createPartition(
- directoryService.getSchemaManager(),
- directoryService.getDnFactory(),
- "system",
- ServerDNConstants.SYSTEM_DN,
- 500,
- new File( directoryService.getInstanceLayout().getPartitionsDirectory(), "system" ) );
- systemPartition.setSchemaManager( directoryService.getSchemaManager() );
-
- partitionFactory.addIndex( systemPartition, SchemaConstants.OBJECT_CLASS_AT, 100 );
-
- directoryService.setSystemPartition( systemPartition );
- }
-
-
- /**
- * Builds the directory server instance.
- *
- * @param name the instance name
- */
- private void build( String name ) throws Exception
- {
- directoryService.setInstanceId( name );
- buildInstanceDirectory( name );
-
- CacheService cacheService = new CacheService();
- cacheService.initialize( directoryService.getInstanceLayout() );
-
- directoryService.setCacheService( cacheService );
-
- // Init the service now
- initSchema();
- initSystemPartition();
-
- directoryService.startup();
- }
-
-
- /**
- * {@inheritDoc}
- */
- public DirectoryService getDirectoryService() throws Exception
- {
- return directoryService;
- }
-
-
- /**
- * {@inheritDoc}
- */
- public PartitionFactory getPartitionFactory() throws Exception
- {
- return partitionFactory;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryService.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryService.java b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryService.java
index 68fa1da..26c4937 100644
--- a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryService.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryService.java
@@ -17,11 +17,15 @@
*/
package org.apache.hadoop.gateway.security.ldap;
-public class SimpleDirectoryService extends BaseDirectoryService {
+import org.apache.directory.server.core.DefaultDirectoryService;
+
+public class SimpleDirectoryService extends DefaultDirectoryService {
public SimpleDirectoryService() throws Exception {
+ super();
}
+ @Override
protected void showSecurityWarnings() throws Exception {
// NoOp - This prevents confusing warnings from being output.
}
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryServiceFactory.java b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryServiceFactory.java
deleted file mode 100644
index 72a05ff..0000000
--- a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleDirectoryServiceFactory.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.security.ldap;
-
-import org.apache.directory.server.core.api.DirectoryService;
-
-public class SimpleDirectoryServiceFactory extends BaseDirectoryServiceFactory {
-
- protected DirectoryService createDirectoryService() {
- DirectoryService result;
- try {
- result = new SimpleDirectoryService();
- } catch( Exception e ) {
- throw new RuntimeException( e );
- }
- return result;
- }
-
-}
[03/25] knox git commit: KNOX-1049 - marshaling changes
Posted by mo...@apache.org.
KNOX-1049 - marshaling changes
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/3346d99e
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/3346d99e
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/3346d99e
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 3346d99e361e1dab74fc8774d3089f9049529941
Parents: 222385b
Author: Larry McCay <lm...@hortonworks.com>
Authored: Wed Oct 25 13:15:38 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Wed Oct 25 13:15:38 2017 -0400
----------------------------------------------------------------------
.../xml/KnoxFormatXmlTopologyRules.java | 2 +-
.../src/main/resources/conf/topology-v1.xsd | 2 +-
.../services/ambariui/2.2.1/rewrite.xml | 104 +++++++++++++++++++
.../services/ambariui/2.2.1/service.xml | 92 ++++++++++++++++
.../gateway/topology/topology_binding-xml.xml | 4 +-
5 files changed, 200 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/3346d99e/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index e221507..cb30769 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@ -32,7 +32,7 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
private static final String ROOT_TAG = "topology";
private static final String NAME_TAG = "name";
private static final String VERSION_TAG = "version";
- private static final String DEFAULT_SERVICE_TAG = "default-service";
+ private static final String DEFAULT_SERVICE_TAG = "path";
private static final String APPLICATION_TAG = "application";
private static final String SERVICE_TAG = "service";
private static final String ROLE_TAG = "role";
http://git-wip-us.apache.org/repos/asf/knox/blob/3346d99e/gateway-server/src/main/resources/conf/topology-v1.xsd
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/conf/topology-v1.xsd b/gateway-server/src/main/resources/conf/topology-v1.xsd
index ab07caa..96c9ba2 100644
--- a/gateway-server/src/main/resources/conf/topology-v1.xsd
+++ b/gateway-server/src/main/resources/conf/topology-v1.xsd
@@ -21,7 +21,7 @@ limitations under the License.
<h:sequence maxOccurs="1">
<h:element name="name" minOccurs="0" maxOccurs="1"/>
- <h:element name="default-service" minOccurs="0" maxOccurs="1"/>
+ <h:element name="path" minOccurs="0" maxOccurs="1"/>
<h:element name="gateway" minOccurs="0" maxOccurs="1">
<h:complexType>
http://git-wip-us.apache.org/repos/asf/knox/blob/3346d99e/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
new file mode 100644
index 0000000..0c99d76
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/rewrite.xml
@@ -0,0 +1,104 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+ <rule dir="IN" name="AMBARIUI/ambari/inbound/root" pattern="*://*:*/**/ambari/">
+ <rewrite template="{$serviceUrl[AMBARIUI]}/"/>
+ </rule>
+ <rule dir="IN" name="AMBARIUI/ambari/inbound/path" pattern="*://*:*/**/ambari/{**}">
+ <rewrite template="{$serviceUrl[AMBARIUI]}/{**}"/>
+ </rule>
+ <rule dir="IN" name="AMBARIUI/ambari/inbound/query" pattern="*://*:*/**/ambari/{**}?{**}">
+ <rewrite template="{$serviceUrl[AMBARIUI]}/{**}?{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/sitepath">
+ <rewrite template="{$frontend[path]}/"/>
+ </rule>
+
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/extrapath">
+ <rewrite template="{$frontend[path]}/api/v1"/>
+ </rule>
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/logohref">
+ <rewrite template="#/main/dashboard"/>
+ </rule>
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/img" pattern="/img/{**}">
+ <rewrite template="{$frontend[url]}/img/{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/css">
+ <rewrite template="{$frontend[path]}/stylesheets/{**}"/>
+ </rule>
+ <rule dir="OUT" name="AMBARIUI/ambari/outbound/js">
+ <rewrite template="{$frontend[path]}/javascripts/{**}"/>
+ </rule>
+
+ <filter name="AMBARIUI/ambari/outbound/proxyroot">
+ <content type="*/x-javascript">
+ <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
+ </content>
+ <content type="application/javascript">
+ <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
+ </content>
+ </filter>
+
+ <!-- filter to rewrite api prefix defined in .js from root -->
+ <!-- e.g. /api/v1 -->
+ <filter name="AMBARIUI/ambari/outbound/apiendpoint">
+ <content type="*/x-javascript">
+ <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ </content>
+ <content type="application/javascript">
+ <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ </content>
+ </filter>
+
+ <filter name="AMBARIUI/ambari/outbound/apiendpoint/html">
+ <content type="text/html">
+ <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ </content>
+ </filter>
+
+ <filter name="AMBARIUI/ambari/outbound/apiendpoint/noroot">
+ <content type="*/x-javascript">
+ <apply path="api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ </content>
+ <content type="application/javascript">
+ <apply path="api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ </content>
+ </filter>
+
+ <filter name="AMBARIUI/ambari/outbound/links">
+ <content type="*/x-javascript">
+ <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
+ <apply path="/#/main/dashboard" rule="AMBARIUI/ambari/outbound/logohref"/>
+ </content>
+ <content type="application/javascript">
+ <apply path="/api/v1" rule="AMBARIUI/ambari/outbound/extrapath"/>
+ <apply path="\{proxy_root\}" rule="AMBARIUI/ambari/outbound/sitepath"/>
+ <apply path="/#/main/dashboard" rule="AMBARIUI/ambari/outbound/logohref"/>
+ </content>
+ <content type="*/html">
+ </content>
+ </filter>
+ <filter name="AMBARIUI/ambari/outbound/mainpage">
+ <content type="*/html">
+ <apply path="stylesheets/{**}.css" rule="AMBARIUI/ambari/outbound/css" />
+ <apply path="javascripts/{**}.js" rule="AMBARIUI/ambari/outbound/js" />
+ </content>
+ </filter>
+</rules>
http://git-wip-us.apache.org/repos/asf/knox/blob/3346d99e/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
new file mode 100644
index 0000000..ab4ab2b
--- /dev/null
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.1/service.xml
@@ -0,0 +1,92 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="AMBARIUI" name="ambariui" version="2.2.0">
+ <policies>
+ <policy role="webappsec"/>
+ <policy role="authentication" name="Anonymous"/>
+ <policy role="rewrite"/>
+ <policy role="authorization"/>
+ </policies>
+ <routes>
+ <route path="/ambari">
+ <rewrite apply="AMBARIUI/ambari/inbound/root" to="request.url"/>
+ <rewrite apply="AMBARIUI/ambari/outbound/mainpage" to="response.body"/>
+ </route>
+ <route path="/ambari/**">
+ <rewrite apply="AMBARIUI/ambari/inbound/path" to="request.url"/>
+ </route>
+ <route path="/ambari/**?**">
+ <rewrite apply="AMBARIUI/ambari/inbound/query" to="request.url"/>
+ </route>
+ <route path="/ambari/**/app.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/links" to="response.body"/>
+ </route>
+
+ <!-- Admin View route -->
+ <route path="/ambari/views/ADMIN_VIEW/**/INSTANCE/**/main.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/proxyroot" to="response.body"/>
+ </route>
+
+ <!-- Files view -->
+ <route path="/ambari/views/FILES/**/assets/files-view.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
+ </route>
+
+ <!-- Capacity Scheduler view -->
+ <route path="/ambari/views/CAPACITY-SCHEDULER/**/javascripts/app.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
+ </route>
+
+ <!-- Pig view -->
+ <route path="/ambari/views/PIG/**/javascripts/app.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
+ </route>
+
+ <!-- Hive view route -->
+ <route path="/ambari/views/HIVE/**/assets/hive.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
+ </route>
+
+ <!-- Storm View -->
+ <route path="/ambari/views/Storm_Monitoring/**/scripts/models/*.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
+ </route>
+
+ <route path="/ambari/views/ZEPPELIN/*/*/">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/html" to="response.body"/>
+ </route>
+
+ <!-- Tez View -->
+ <route path="/ambari/views/TEZ/*/*/ambari-scripts/init-view.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
+ </route>
+
+ <!-- Hive 2.0 view -->
+ <route path="/ambari/views/HIVE/**/assets/ui.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
+ </route>
+
+ <!-- SmartSense view -->
+ <route path="/ambari/views/SMARTSENSE/**/assets/hstapp-*.js">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
+ </route>
+
+ <!-- No need to rewrite Slider View -->
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.dispatch.PassAllHeadersNoEncodingDispatch"/>
+</service>
+
http://git-wip-us.apache.org/repos/asf/knox/blob/3346d99e/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
index 8c54ed7..4c89b5d 100644
--- a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
+++ b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
@@ -28,7 +28,7 @@ or more contributor license agreements. See the NOTICE file
<xml-root-element/>
<java-attributes>
<xml-element java-attribute="name" name="name"/>
- <xml-element java-attribute="defaultServicePath" name="default-service"/>
+ <xml-element java-attribute="defaultServicePath" name="path"/>
<xml-elements java-attribute="providers">
<xml-element name="provider"/>
<xml-element-wrapper name="gateway"/>
@@ -61,4 +61,4 @@ or more contributor license agreements. See the NOTICE file
</java-attributes>
</java-type>
</java-types>
-</xml-bindings>
\ No newline at end of file
+</xml-bindings>
[08/25] knox git commit: KNOX-1049 - check for null topology to avoid
NPE in tests
Posted by mo...@apache.org.
KNOX-1049 - check for null topology to avoid NPE in tests
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c211d05e
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c211d05e
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c211d05e
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: c211d05e98c82d57eb0af61487a76f0ddd99660d
Parents: 1ee9370
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 26 11:17:30 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 26 11:17:30 2017 -0400
----------------------------------------------------------------------
.../apache/hadoop/gateway/GatewayFilter.java | 41 ++++++++++----------
1 file changed, 21 insertions(+), 20 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/c211d05e/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
index 2885fe3..7617ae8 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
@@ -128,28 +128,29 @@ public class GatewayFilter implements Filter {
// if there was no match then look for a default service for the topology
if (match == null) {
Topology topology = (Topology) servletRequest.getServletContext().getAttribute("org.apache.hadoop.gateway.topology");
- String defaultServicePath = topology.getDefaultServicePath();
- if (defaultServicePath != null) {
- try {
- String newPathWithQuery = defaultServicePath + "/" + pathWithQueryTemplate;
- match = chains.match(Parser.parseLiteral(newPathWithQuery));
- String origUrl = ((HttpServletRequest) servletRequest).getRequestURL().toString();
- String url = origUrl;
- if (path.equals("/")) {
- url += defaultServicePath;
- }
- else {
- int index = origUrl.indexOf(path);
- url = origUrl.substring(0, index) + "/" + defaultServicePath + path;
+ if (topology != null) {
+ String defaultServicePath = topology.getDefaultServicePath();
+ if (defaultServicePath != null) {
+ try {
+ String newPathWithQuery = defaultServicePath + "/" + pathWithQueryTemplate;
+ match = chains.match(Parser.parseLiteral(newPathWithQuery));
+ String origUrl = ((HttpServletRequest) servletRequest).getRequestURL().toString();
+ String url = origUrl;
+ if (path.equals("/")) {
+ url += defaultServicePath;
+ }
+ else {
+ int index = origUrl.indexOf(path);
+ url = origUrl.substring(0, index) + "/" + defaultServicePath + path;
+ }
+ String contextPath = defaultServicePath;
+ servletRequest = new ForwardedRequest((HttpServletRequest) servletRequest,
+ contextPath,
+ url);
+ } catch (URISyntaxException e) {
+ throw new ServletException( e );
}
- String contextPath = defaultServicePath;
- servletRequest = new ForwardedRequest((HttpServletRequest) servletRequest,
- contextPath,
- url);
- } catch (URISyntaxException e) {
- throw new ServletException( e );
}
-// ((HttpServletRequest) servletRequest).getRequestURL();
}
}
[17/25] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
----------------------------------------------------------------------
diff --cc gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
index 1861490,0000000..948447b
mode 100644,000000..100644
--- a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
+++ b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
@@@ -1,312 -1,0 +1,657 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.admin;
+
++import com.fasterxml.jackson.annotation.JsonProperty;
++import org.apache.commons.io.FileUtils;
++import org.apache.commons.io.FilenameUtils;
++import org.apache.knox.gateway.i18n.GatewaySpiMessages;
++import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.service.admin.beans.BeanConverter;
+import org.apache.knox.gateway.service.admin.beans.Topology;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.topology.TopologyService;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
++import java.io.File;
++import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
++import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
+import static javax.ws.rs.core.MediaType.APPLICATION_XML;
++import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
++
+import static javax.ws.rs.core.Response.ok;
++import static javax.ws.rs.core.Response.created;
++import static javax.ws.rs.core.Response.notModified;
++import static javax.ws.rs.core.Response.status;
++
+
+@Path("/api/v1")
+public class TopologiesResource {
++
++ private static final String XML_EXT = ".xml";
++ private static final String JSON_EXT = ".json";
++
++ private static final String TOPOLOGIES_API_PATH = "topologies";
++ private static final String SINGLE_TOPOLOGY_API_PATH = TOPOLOGIES_API_PATH + "/{id}";
++ private static final String PROVIDERCONFIG_API_PATH = "providerconfig";
++ private static final String SINGLE_PROVIDERCONFIG_API_PATH = PROVIDERCONFIG_API_PATH + "/{name}";
++ private static final String DESCRIPTORS_API_PATH = "descriptors";
++ private static final String SINGLE_DESCRIPTOR_API_PATH = DESCRIPTORS_API_PATH + "/{name}";
++
++ private static GatewaySpiMessages log = MessagesFactory.get(GatewaySpiMessages.class);
++
+ @Context
+ private HttpServletRequest request;
+
+ @GET
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies/{id}")
++ @Path(SINGLE_TOPOLOGY_API_PATH)
+ public Topology getTopology(@PathParam("id") String id) {
+ GatewayServices services = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+ GatewayConfig config = (GatewayConfig) request.getServletContext().getAttribute(GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ for (org.apache.knox.gateway.topology.Topology t : ts.getTopologies()) {
+ if(t.getName().equals(id)) {
+ try {
+ t.setUri(new URI( buildURI(t, config, request) ));
+ } catch (URISyntaxException se) {
+ t.setUri(null);
+ }
+ return BeanConverter.getTopology(t);
+ }
+ }
+ return null;
+ }
+
+ @GET
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies")
++ @Path(TOPOLOGIES_API_PATH)
+ public SimpleTopologyWrapper getTopologies() {
+ GatewayServices services = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ ArrayList<SimpleTopology> st = new ArrayList<SimpleTopology>();
+ GatewayConfig conf = (GatewayConfig) request.getServletContext().getAttribute(GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE);
+
+ for (org.apache.knox.gateway.topology.Topology t : ts.getTopologies()) {
+ st.add(getSimpleTopology(t, conf));
+ }
+
+ Collections.sort(st, new TopologyComparator());
+ SimpleTopologyWrapper stw = new SimpleTopologyWrapper();
+
+ for(SimpleTopology t : st){
+ stw.topologies.add(t);
+ }
+
+ return stw;
+
+ }
+
+ @PUT
+ @Consumes({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies/{id}")
++ @Path(SINGLE_TOPOLOGY_API_PATH)
+ public Topology uploadTopology(@PathParam("id") String id, Topology t) {
+
+ GatewayServices gs = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ t.setName(id);
+ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ ts.deployTopology(BeanConverter.getTopology(t));
+
+ return getTopology(id);
+ }
+
+ @DELETE
+ @Produces(APPLICATION_JSON)
- @Path("topologies/{id}")
++ @Path(SINGLE_TOPOLOGY_API_PATH)
+ public Response deleteTopology(@PathParam("id") String id) {
+ boolean deleted = false;
+ if(!"admin".equals(id)) {
+ GatewayServices services = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ for (org.apache.knox.gateway.topology.Topology t : ts.getTopologies()) {
+ if(t.getName().equals(id)) {
+ ts.deleteTopology(t);
+ deleted = true;
+ }
+ }
+ }else{
+ deleted = false;
+ }
+ return ok().entity("{ \"deleted\" : " + deleted + " }").build();
+ }
+
++ @GET
++ @Produces({APPLICATION_JSON})
++ @Path(PROVIDERCONFIG_API_PATH)
++ public HrefListing getProviderConfigurations() {
++ HrefListing listing = new HrefListing();
++ listing.setHref(buildHref(request));
++
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ List<HrefListItem> configs = new ArrayList<>();
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++ // Get all the simple descriptor file names
++ for (File providerConfig : ts.getProviderConfigurations()){
++ String id = FilenameUtils.getBaseName(providerConfig.getName());
++ configs.add(new HrefListItem(buildHref(id, request), providerConfig.getName()));
++ }
++
++ listing.setItems(configs);
++ return listing;
++ }
++
++ @GET
++ @Produces({APPLICATION_XML})
++ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
++ public Response getProviderConfiguration(@PathParam("name") String name) {
++ Response response;
++
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++
++ File providerConfigFile = null;
++
++ for (File pc : ts.getProviderConfigurations()){
++ // If the file name matches the specified id
++ if (FilenameUtils.getBaseName(pc.getName()).equals(name)) {
++ providerConfigFile = pc;
++ break;
++ }
++ }
++
++ if (providerConfigFile != null) {
++ byte[] content = null;
++ try {
++ content = FileUtils.readFileToByteArray(providerConfigFile);
++ response = ok().entity(content).build();
++ } catch (IOException e) {
++ log.failedToReadConfigurationFile(providerConfigFile.getAbsolutePath(), e);
++ response = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
++ }
++
++ } else {
++ response = Response.status(Response.Status.NOT_FOUND).build();
++ }
++ return response;
++ }
++
++ @DELETE
++ @Produces(APPLICATION_JSON)
++ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
++ public Response deleteProviderConfiguration(@PathParam("name") String name) {
++ Response response;
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++ if (ts.deleteProviderConfiguration(name)) {
++ response = ok().entity("{ \"deleted\" : \"provider config " + name + "\" }").build();
++ } else {
++ response = notModified().build();
++ }
++ return response;
++ }
++
++
++ @DELETE
++ @Produces(APPLICATION_JSON)
++ @Path(SINGLE_DESCRIPTOR_API_PATH)
++ public Response deleteSimpleDescriptor(@PathParam("name") String name) {
++ Response response = null;
++ if(!"admin".equals(name)) {
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++ if (ts.deleteDescriptor(name)) {
++ response = ok().entity("{ \"deleted\" : \"descriptor " + name + "\" }").build();
++ }
++ }
++
++ if (response == null) {
++ response = notModified().build();
++ }
++
++ return response;
++ }
++
++
++ @PUT
++ @Consumes({APPLICATION_XML})
++ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
++ public Response uploadProviderConfiguration(@PathParam("name") String name, String content) {
++ Response response = null;
++
++ GatewayServices gs =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
++
++ boolean isUpdate = configFileExists(ts.getProviderConfigurations(), name);
++
++ String filename = name.endsWith(XML_EXT) ? name : name + XML_EXT;
++ if (ts.deployProviderConfiguration(filename, content)) {
++ try {
++ if (isUpdate) {
++ response = Response.noContent().build();
++ } else{
++ response = created(new URI(buildHref(request))).build();
++ }
++ } catch (URISyntaxException e) {
++ log.invalidResourceURI(e.getInput(), e.getReason(), e);
++ response = status(Response.Status.BAD_REQUEST).entity("{ \"error\" : \"Failed to deploy provider configuration " + name + "\" }").build();
++ }
++ }
++
++ return response;
++ }
++
++
++ private boolean configFileExists(Collection<File> existing, String candidateName) {
++ boolean result = false;
++ for (File exists : existing) {
++ if (FilenameUtils.getBaseName(exists.getName()).equals(candidateName)) {
++ result = true;
++ break;
++ }
++ }
++ return result;
++ }
++
++
++ @PUT
++ @Consumes({APPLICATION_JSON})
++ @Path(SINGLE_DESCRIPTOR_API_PATH)
++ public Response uploadSimpleDescriptor(@PathParam("name") String name, String content) {
++ Response response = null;
++
++ GatewayServices gs =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
++
++ boolean isUpdate = configFileExists(ts.getDescriptors(), name);
++
++ String filename = name.endsWith(JSON_EXT) ? name : name + JSON_EXT;
++ if (ts.deployDescriptor(filename, content)) {
++ try {
++ if (isUpdate) {
++ response = Response.noContent().build();
++ } else {
++ response = created(new URI(buildHref(request))).build();
++ }
++ } catch (URISyntaxException e) {
++ log.invalidResourceURI(e.getInput(), e.getReason(), e);
++ response = status(Response.Status.BAD_REQUEST).entity("{ \"error\" : \"Failed to deploy descriptor " + name + "\" }").build();
++ }
++ }
++
++ return response;
++ }
++
++
++ @GET
++ @Produces({APPLICATION_JSON})
++ @Path(DESCRIPTORS_API_PATH)
++ public HrefListing getSimpleDescriptors() {
++ HrefListing listing = new HrefListing();
++ listing.setHref(buildHref(request));
++
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ List<HrefListItem> descriptors = new ArrayList<>();
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++ for (File descriptor : ts.getDescriptors()){
++ String id = FilenameUtils.getBaseName(descriptor.getName());
++ descriptors.add(new HrefListItem(buildHref(id, request), descriptor.getName()));
++ }
++
++ listing.setItems(descriptors);
++ return listing;
++ }
++
++
++ @GET
++ @Produces({APPLICATION_JSON, TEXT_PLAIN})
++ @Path(SINGLE_DESCRIPTOR_API_PATH)
++ public Response getSimpleDescriptor(@PathParam("name") String name) {
++ Response response;
++
++ GatewayServices services =
++ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
++
++ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
++
++ File descriptorFile = null;
++
++ for (File sd : ts.getDescriptors()){
++ // If the file name matches the specified id
++ if (FilenameUtils.getBaseName(sd.getName()).equals(name)) {
++ descriptorFile = sd;
++ break;
++ }
++ }
++
++ if (descriptorFile != null) {
++ String mediaType = APPLICATION_JSON;
++
++ byte[] content = null;
++ try {
++ if ("yml".equals(FilenameUtils.getExtension(descriptorFile.getName()))) {
++ mediaType = TEXT_PLAIN;
++ }
++ content = FileUtils.readFileToByteArray(descriptorFile);
++ response = ok().type(mediaType).entity(content).build();
++ } catch (IOException e) {
++ log.failedToReadConfigurationFile(descriptorFile.getAbsolutePath(), e);
++ response = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
++ }
++ } else {
++ response = Response.status(Response.Status.NOT_FOUND).build();
++ }
++
++ return response;
++ }
++
+
+ private static class TopologyComparator implements Comparator<SimpleTopology> {
+ @Override
+ public int compare(SimpleTopology t1, SimpleTopology t2) {
+ return t1.getName().compareTo(t2.getName());
+ }
+ }
+
- String buildURI(org.apache.knox.gateway.topology.Topology topology, GatewayConfig config, HttpServletRequest req){
++
++ String buildURI(org.apache.knox.gateway.topology.Topology topology, GatewayConfig config, HttpServletRequest req){
+ String uri = buildXForwardBaseURL(req);
+
- // Strip extra context
++ // Strip extra context
+ uri = uri.replace(req.getContextPath(), "");
+
- // Add the gateway path
++ // Add the gateway path
+ String gatewayPath;
+ if(config.getGatewayPath() != null){
+ gatewayPath = config.getGatewayPath();
+ }else{
+ gatewayPath = "gateway";
+ }
+ uri += "/" + gatewayPath;
+
+ uri += "/" + topology.getName();
+ return uri;
+ }
+
- String buildHref(org.apache.knox.gateway.topology.Topology t, HttpServletRequest req) {
++ String buildHref(HttpServletRequest req) {
++ return buildHref((String)null, req);
++ }
++
++ String buildHref(String id, HttpServletRequest req) {
+ String href = buildXForwardBaseURL(req);
- // Make sure that the pathInfo doesn't have any '/' chars at the end.
++ // Make sure that the pathInfo doesn't have any '/' chars at the end.
+ String pathInfo = req.getPathInfo();
- if(pathInfo.endsWith("/")) {
- while(pathInfo.endsWith("/")) {
- pathInfo = pathInfo.substring(0, pathInfo.length() - 1);
- }
++ while(pathInfo.endsWith("/")) {
++ pathInfo = pathInfo.substring(0, pathInfo.length() - 1);
++ }
++
++ href += pathInfo;
++
++ if (id != null) {
++ href += "/" + id;
+ }
+
- href += pathInfo + "/" + t.getName();
+ return href;
+ }
+
- private SimpleTopology getSimpleTopology(
- org.apache.knox.gateway.topology.Topology t, GatewayConfig config) {
++ String buildHref(org.apache.knox.gateway.topology.Topology t, HttpServletRequest req) {
++ return buildHref(t.getName(), req);
++ }
++
++ private SimpleTopology getSimpleTopology(org.apache.hadoop.gateway.topology.Topology t, GatewayConfig config) {
+ String uri = buildURI(t, config, request);
+ String href = buildHref(t, request);
+ return new SimpleTopology(t, uri, href);
+ }
+
+ private String buildXForwardBaseURL(HttpServletRequest req){
+ final String X_Forwarded = "X-Forwarded-";
+ final String X_Forwarded_Context = X_Forwarded + "Context";
+ final String X_Forwarded_Proto = X_Forwarded + "Proto";
+ final String X_Forwarded_Host = X_Forwarded + "Host";
+ final String X_Forwarded_Port = X_Forwarded + "Port";
+ final String X_Forwarded_Server = X_Forwarded + "Server";
+
+ String baseURL = "";
+
- // Get Protocol
++ // Get Protocol
+ if(req.getHeader(X_Forwarded_Proto) != null){
+ baseURL += req.getHeader(X_Forwarded_Proto) + "://";
+ } else {
+ baseURL += req.getProtocol() + "://";
+ }
+
- // Handle Server/Host and Port Here
++ // Handle Server/Host and Port Here
+ if (req.getHeader(X_Forwarded_Host) != null && req.getHeader(X_Forwarded_Port) != null){
- // Double check to see if host has port
++ // Double check to see if host has port
+ if(req.getHeader(X_Forwarded_Host).contains(req.getHeader(X_Forwarded_Port))){
+ baseURL += req.getHeader(X_Forwarded_Host);
+ } else {
- // If there's no port, add the host and port together;
++ // If there's no port, add the host and port together;
+ baseURL += req.getHeader(X_Forwarded_Host) + ":" + req.getHeader(X_Forwarded_Port);
+ }
+ } else if(req.getHeader(X_Forwarded_Server) != null && req.getHeader(X_Forwarded_Port) != null){
- // Tack on the server and port if they're available. Try host if server not available
++ // Tack on the server and port if they're available. Try host if server not available
+ baseURL += req.getHeader(X_Forwarded_Server) + ":" + req.getHeader(X_Forwarded_Port);
+ } else if(req.getHeader(X_Forwarded_Port) != null) {
- // if we at least have a port, we can use it.
++ // if we at least have a port, we can use it.
+ baseURL += req.getServerName() + ":" + req.getHeader(X_Forwarded_Port);
+ } else {
- // Resort to request members
++ // Resort to request members
+ baseURL += req.getServerName() + ":" + req.getLocalPort();
+ }
+
- // Handle Server context
++ // Handle Server context
+ if( req.getHeader(X_Forwarded_Context) != null ) {
+ baseURL += req.getHeader( X_Forwarded_Context );
+ } else {
+ baseURL += req.getContextPath();
+ }
+
+ return baseURL;
+ }
+
++
++ static class HrefListing {
++ @JsonProperty
++ String href;
++
++ @JsonProperty
++ List<HrefListItem> items;
++
++ HrefListing() {}
++
++ public void setHref(String href) {
++ this.href = href;
++ }
++
++ public String getHref() {
++ return href;
++ }
++
++ public void setItems(List<HrefListItem> items) {
++ this.items = items;
++ }
++
++ public List<HrefListItem> getItems() {
++ return items;
++ }
++ }
++
++ static class HrefListItem {
++ @JsonProperty
++ String href;
++
++ @JsonProperty
++ String name;
++
++ HrefListItem() {}
++
++ HrefListItem(String href, String name) {
++ this.href = href;
++ this.name = name;
++ }
++
++ public void setHref(String href) {
++ this.href = href;
++ }
++
++ public String getHref() {
++ return href;
++ }
++
++ public void setName(String name) {
++ this.name = name;
++ }
++ public String getName() {
++ return name;
++ }
++ }
++
++
+ @XmlAccessorType(XmlAccessType.NONE)
+ public static class SimpleTopology {
+
+ @XmlElement
+ private String name;
+ @XmlElement
+ private String timestamp;
+ @XmlElement
++ private String defaultServicePath;
++ @XmlElement
+ private String uri;
+ @XmlElement
+ private String href;
+
+ public SimpleTopology() {}
+
+ public SimpleTopology(org.apache.knox.gateway.topology.Topology t, String uri, String href) {
+ this.name = t.getName();
+ this.timestamp = Long.toString(t.getTimestamp());
++ this.defaultServicePath = t.getDefaultServicePath();
+ this.uri = uri;
+ this.href = href;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String n) {
+ name = n;
+ }
+
+ public String getTimestamp() {
+ return timestamp;
+ }
+
++ public void setDefaultService(String defaultServicePath) {
++ this.defaultServicePath = defaultServicePath;
++ }
++
++ public String getDefaultService() {
++ return defaultServicePath;
++ }
++
+ public void setTimestamp(String timestamp) {
+ this.timestamp = timestamp;
+ }
+
+ public String getUri() {
+ return uri;
+ }
+
+ public void setUri(String uri) {
+ this.uri = uri;
+ }
+
+ public String getHref() {
+ return href;
+ }
+
+ public void setHref(String href) {
+ this.href = href;
+ }
+ }
+
+ @XmlAccessorType(XmlAccessType.FIELD)
+ public static class SimpleTopologyWrapper{
+
+ @XmlElement(name="topology")
+ @XmlElementWrapper(name="topologies")
+ private List<SimpleTopology> topologies = new ArrayList<SimpleTopology>();
+
+ public List<SimpleTopology> getTopologies(){
+ return topologies;
+ }
+
+ public void setTopologies(List<SimpleTopology> ts){
+ this.topologies = ts;
+ }
+
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/BeanConverter.java
----------------------------------------------------------------------
diff --cc gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/BeanConverter.java
index 358b5b5,0000000..e8d6915
mode 100644,000000..100644
--- a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/BeanConverter.java
+++ b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/BeanConverter.java
@@@ -1,168 -1,0 +1,170 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.admin.beans;
+
+import org.apache.knox.gateway.topology.Version;
+
+import java.util.Collection;
+
+public class BeanConverter {
+
+ public static Topology getTopology(
+ org.apache.knox.gateway.topology.Topology topology) {
+ Topology topologyResource = new Topology();
+ topologyResource.setName(topology.getName());
+ topologyResource.setTimestamp(topology.getTimestamp());
++ topologyResource.setPath(topology.getDefaultServicePath());
+ topologyResource.setUri(topology.getUri());
+ for ( org.apache.knox.gateway.topology.Provider provider : topology.getProviders() ) {
+ topologyResource.getProviders().add( getProvider(provider) );
+ }
+ for ( org.apache.knox.gateway.topology.Service service : topology.getServices() ) {
+ topologyResource.getServices().add( getService(service) );
+ }
+ for ( org.apache.knox.gateway.topology.Application application : topology.getApplications() ) {
+ topologyResource.getApplications().add( getApplication(application) );
+ }
+ return topologyResource;
+ }
+
+ public static org.apache.knox.gateway.topology.Topology getTopology(Topology topology) {
+ org.apache.knox.gateway.topology.Topology deploymentTopology = new org.apache.knox.gateway.topology.Topology();
+ deploymentTopology.setName(topology.getName());
+ deploymentTopology.setTimestamp(topology.getTimestamp());
++ deploymentTopology.setDefaultServicePath(topology.getPath());
+ deploymentTopology.setUri(topology.getUri());
+ for ( Provider provider : topology.getProviders() ) {
+ deploymentTopology.addProvider( getProvider(provider) );
+ }
+ for ( Service service : topology.getServices() ) {
+ deploymentTopology.addService( getService(service) );
+ }
+ for ( Application application : topology.getApplications() ) {
+ deploymentTopology.addApplication( getApplication(application) );
+ }
+ return deploymentTopology;
+ }
+
+ private static Provider getProvider(
+ org.apache.knox.gateway.topology.Provider provider) {
+ Provider providerResource = new Provider();
+ providerResource.setName(provider.getName());
+ providerResource.setEnabled(provider.isEnabled());
+ providerResource.setRole(provider.getRole());
+ Collection<org.apache.knox.gateway.topology.Param> paramsList = provider.getParamsList();
+ if (paramsList != null && !paramsList.isEmpty()) {
+ for ( org.apache.knox.gateway.topology.Param param : paramsList ) {
+ providerResource.getParams().add(getParam(param));
+ }
+ }
+ return providerResource;
+ }
+
+ private static org.apache.knox.gateway.topology.Provider getProvider(Provider provider) {
+ org.apache.knox.gateway.topology.Provider deploymentProvider = new org.apache.knox.gateway.topology.Provider();
+ deploymentProvider.setName(provider.getName());
+ deploymentProvider.setEnabled(provider.isEnabled());
+ deploymentProvider.setRole(provider.getRole());
+ for ( Param param : provider.getParams() ) {
+ deploymentProvider.addParam( getParam(param) );
+ }
+ return deploymentProvider;
+ }
+
+ private static Service getService(
+ org.apache.knox.gateway.topology.Service service) {
+ Service serviceResource = new Service();
+ serviceResource.setRole(service.getRole());
+ serviceResource.setName(service.getName());
+ Version version = service.getVersion();
+ if (version != null) {
+ serviceResource.setVersion(version.toString());
+ }
+ Collection<org.apache.knox.gateway.topology.Param> paramsList = service.getParamsList();
+ if (paramsList != null && !paramsList.isEmpty()) {
+ for ( org.apache.knox.gateway.topology.Param param : paramsList ) {
+ serviceResource.getParams().add(getParam(param));
+ }
+ }
+ for ( String url : service.getUrls() ) {
+ serviceResource.getUrls().add( url );
+ }
+ return serviceResource;
+ }
+
+ private static org.apache.knox.gateway.topology.Service getService(Service service) {
+ org.apache.knox.gateway.topology.Service deploymentService = new org.apache.knox.gateway.topology.Service();
+ deploymentService.setRole(service.getRole());
+ deploymentService.setName(service.getName());
+ if (service.getVersion() != null) {
+ deploymentService.setVersion(new Version(service.getVersion()));
+ }
+ for ( Param param : service.getParams() ) {
+ deploymentService.addParam( getParam(param) );
+ }
+ for ( String url : service.getUrls() ) {
+ deploymentService.addUrl( url );
+ }
+ return deploymentService;
+ }
+
+ private static Application getApplication(
+ org.apache.knox.gateway.topology.Application application) {
+ Application applicationResource = new Application();
+ applicationResource.setRole(application.getRole());
+ applicationResource.setName(application.getName());
+ Version version = application.getVersion();
+ if (version != null) {
+ applicationResource.setVersion(version.toString());
+ }
+ Collection<org.apache.knox.gateway.topology.Param> paramsList = application.getParamsList();
+ if (paramsList != null && !paramsList.isEmpty()) {
+ for ( org.apache.knox.gateway.topology.Param param : paramsList ) {
+ applicationResource.getParams().add(getParam(param));
+ }
+ }
+ for ( String url : application.getUrls() ) {
+ applicationResource.getUrls().add( url );
+ }
+ return applicationResource;
+ }
+
+ private static org.apache.knox.gateway.topology.Application getApplication(Application application) {
+ org.apache.knox.gateway.topology.Application applicationResource = new org.apache.knox.gateway.topology.Application();
+ applicationResource.setRole(application.getRole());
+ applicationResource.setName(application.getName());
+ if (application.getVersion() != null) {
+ applicationResource.setVersion(new Version(application.getVersion()));
+ }
+ for ( Param param : application.getParams() ) {
+ applicationResource.addParam( getParam(param) );
+ }
+ for ( String url : application.getUrls() ) {
+ applicationResource.getUrls().add( url );
+ }
+ return applicationResource;
+ }
+
+ private static Param getParam(org.apache.knox.gateway.topology.Param param) {
+ return new Param(param.getName(), param.getValue());
+ }
+
+ private static org.apache.knox.gateway.topology.Param getParam(Param param) {
+ return new org.apache.knox.gateway.topology.Param(param.getName(), param.getValue());
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/Topology.java
----------------------------------------------------------------------
diff --cc gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/Topology.java
index 9c58ad3,0000000..2d2eab8
mode 100644,000000..100644
--- a/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/Topology.java
+++ b/gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/beans/Topology.java
@@@ -1,108 -1,0 +1,119 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.admin.beans;
+
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+
+@XmlRootElement(name="topology")
+public class Topology {
+
+ @XmlElement
+ private URI uri;
+
+ @XmlElement
+ private String name;
+
+ @XmlElement
++ private String path;
++
++ @XmlElement
+ private long timestamp;
+
+ @XmlElement(name="provider")
+ @XmlElementWrapper(name="gateway")
+ public List<Provider> providers;
+
+ @XmlElement(name="service")
+ public List<Service> services;
+
+ @XmlElement(name="application")
+ private List<Application> applications;
+
+ public Topology() {
+ }
+
+ public URI getUri() {
+ return uri;
+ }
+
+ public void setUri( URI uri ) {
+ this.uri = uri;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName( String name ) {
+ this.name = name;
+ }
+
+ public long getTimestamp() {
+ return timestamp;
+ }
+
++ public void setPath( String defaultServicePath ) {
++ this.path = defaultServicePath;
++ }
++
++ public String getPath() {
++ return path;
++ }
++
+ public void setTimestamp( long timestamp ) {
+ this.timestamp = timestamp;
+ }
+
+ public List<Service> getServices() {
+ if (services == null) {
+ services = new ArrayList<>();
+ }
+ return services;
+ }
+
+ public List<Application> getApplications() {
+ if (applications == null) {
+ applications = new ArrayList<>();
+ }
+ return applications;
+ }
+
+ public List<Provider> getProviders() {
+ if (providers == null) {
+ providers = new ArrayList<>();
+ }
+ return providers;
+ }
+
+ public void setProviders(List<Provider> providers) {
+ this.providers = providers;
+ }
+
+ public void setServices(List<Service> services) {
+ this.services = services;
+ }
+
+ public void setApplications(List<Application> applications) {
+ this.applications = applications;
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-spi/src/main/java/org/apache/knox/gateway/i18n/GatewaySpiMessages.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/i18n/GatewaySpiMessages.java
index 243bac3,0000000..27a1905
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/i18n/GatewaySpiMessages.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/i18n/GatewaySpiMessages.java
@@@ -1,85 -1,0 +1,91 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.i18n;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.knox.gateway")
+public interface GatewaySpiMessages {
+
+ @Message(level = MessageLevel.ERROR, text = "Failed to load the internal principal mapping table: {0}" )
+ void failedToLoadPrincipalMappingTable( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to execute filter: {0}" )
+ void failedToExecuteFilter( @StackTrace( level = MessageLevel.DEBUG ) Throwable t );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to encrypt passphrase: {0}" )
+ void failedToEncryptPassphrase( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to generate secret key from password: {0}" )
+ void failedToGenerateKeyFromPassword( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to create keystore [filename={0}, type={1}]: {2}" )
+ void failedToCreateKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to load keystore [filename={0}, type={1}]: {2}" )
+ void failedToLoadKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to add credential: {1}" )
+ void failedToAddCredential( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message(level = MessageLevel.ERROR, text = "Failed to remove credential: {1}")
+ void failedToRemoveCredential(@StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get credential: {1}" )
+ void failedToGetCredential(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to persist master secret: {0}" )
+ void failedToPersistMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to encrypt master secret: {0}" )
+ void failedToEncryptMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to initialize master service from persistent master {0}: {1}" )
+ void failedToInitializeFromPersistentMaster( String masterFileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to add self signed certificate for Gateway {0}: {1}" )
+ void failedToAddSeflSignedCertForGateway( String alias, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get key {0}: {1}" )
+ void failedToGetKey(String alias, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Loading from persistent master: {0}" )
+ void loadingFromPersistentMaster( String tag );
+
+ @Message( level = MessageLevel.DEBUG, text = "ALIAS: {0}" )
+ void printClusterAlias( String alias );
+
+ @Message( level = MessageLevel.DEBUG, text = "MASTER SERVICE == NULL: {0}" )
+ void printMasterServiceIsNull( boolean masterServiceIsNull );
+
+ @Message( level = MessageLevel.ERROR, text = "Gateway has failed to start. Unable to prompt user for master secret setup. Please consider using knoxcli.sh create-master" )
+ void unableToPromptForMasterUseKnoxCLI();
+
- @Message( level = MessageLevel.ERROR, text = "Error in generating certificate: {0}" )
- void failedToGenerateCertificate( @StackTrace( level = MessageLevel.ERROR ) Exception e );
++ @Message( level = MessageLevel.ERROR, text = "Error in generating certificate: {0}" )
++ void failedToGenerateCertificate( @StackTrace( level = MessageLevel.ERROR ) Exception e );
++
++ @Message(level = MessageLevel.ERROR, text = "Failed to read configuration: {0}")
++ void failedToReadConfigurationFile(final String filePath, @StackTrace(level = MessageLevel.DEBUG) Exception e );
++
++ @Message(level = MessageLevel.ERROR, text = "Invalid resource URI {0} : {1}")
++ void invalidResourceURI(final String uri, final String reason, @StackTrace(level = MessageLevel.DEBUG) Exception e );
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-spi/src/main/java/org/apache/knox/gateway/services/topology/TopologyService.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/services/topology/TopologyService.java
index 820da73,0000000..3be3a4a
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/services/topology/TopologyService.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/services/topology/TopologyService.java
@@@ -1,50 -1,0 +1,63 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.topology;
+
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.topology.Topology;
+import org.apache.knox.gateway.topology.TopologyListener;
+
++import java.io.File;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+
+public interface TopologyService extends Service {
+
- public void reloadTopologies();
++ void reloadTopologies();
+
- public void deployTopology(Topology t);
++ void deployTopology(Topology t);
+
- public void redeployTopologies(String topologyName);
++ void redeployTopologies(String topologyName);
+
- public void addTopologyChangeListener(TopologyListener listener);
++ void addTopologyChangeListener(TopologyListener listener);
+
- public void startMonitor() throws Exception;
++ void startMonitor() throws Exception;
+
- public void stopMonitor() throws Exception;
++ void stopMonitor() throws Exception;
+
- public Collection<Topology> getTopologies();
++ Collection<Topology> getTopologies();
+
- public void deleteTopology(Topology t);
++ boolean deployProviderConfiguration(String name, String content);
+
- public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config);
++ Collection<File> getProviderConfigurations();
+
- }
++ boolean deployDescriptor(String name, String content);
++
++ Collection<File> getDescriptors();
++
++ void deleteTopology(Topology t);
++
++ boolean deleteDescriptor(String name);
++
++ boolean deleteProviderConfiguration(String name);
++
++ Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config);
++
++}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Topology.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/java/org/apache/knox/gateway/topology/Topology.java
index 6eac50b,0000000..815c218
mode 100644,000000..100644
--- a/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Topology.java
+++ b/gateway-spi/src/main/java/org/apache/knox/gateway/topology/Topology.java
@@@ -1,142 -1,0 +1,151 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology;
+
+import org.apache.commons.collections.map.HashedMap;
+import org.apache.commons.collections.map.MultiKeyMap;
+
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class Topology {
+
+ private URI uri;
+ private String name;
++ private String defaultServicePath = null;
+ private long timestamp;
+ public List<Provider> providerList = new ArrayList<Provider>();
+ private Map<String,Map<String,Provider>> providerMap = new HashMap<>();
+ public List<Service> services = new ArrayList<Service>();
+ private MultiKeyMap serviceMap;
+ private List<Application> applications = new ArrayList<Application>();
+ private Map<String,Application> applicationMap = new HashMap<>();
+
+ public Topology() {
+ serviceMap = MultiKeyMap.decorate(new HashedMap());
+ }
+
+ public URI getUri() {
+ return uri;
+ }
+
+ public void setUri( URI uri ) {
+ this.uri = uri;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName( String name ) {
+ this.name = name;
+ }
+
+ public long getTimestamp() {
+ return timestamp;
+ }
+
+ public void setTimestamp( long timestamp ) {
+ this.timestamp = timestamp;
+ }
+
++ public String getDefaultServicePath() {
++ return defaultServicePath;
++ }
++
++ public void setDefaultServicePath(String servicePath) {
++ defaultServicePath = servicePath;
++ }
++
+ public Collection<Service> getServices() {
+ return services;
+ }
+
+ public Service getService( String role, String name, Version version) {
+ return (Service)serviceMap.get(role, name, version);
+ }
+
+ public void addService( Service service ) {
+ services.add( service );
+ serviceMap.put(service.getRole(), service.getName(), service.getVersion(), service);
+ }
+
+ public Collection<Application> getApplications() {
+ return applications;
+ }
+
+ private static String fixApplicationUrl( String url ) {
+ if( url == null ) {
+ url = "/";
+ }
+ if( !url.startsWith( "/" ) ) {
+ url = "/" + url;
+ }
+ return url;
+ }
+
+ public Application getApplication(String url) {
+ return applicationMap.get( fixApplicationUrl( url ) );
+ }
+
+ public void addApplication( Application application ) {
+ applications.add( application );
+ List<String> urls = application.getUrls();
+ if( urls == null || urls.isEmpty() ) {
+ applicationMap.put( fixApplicationUrl( application.getName() ), application );
+ } else {
+ for( String url : application.getUrls() ) {
+ applicationMap.put( fixApplicationUrl( url ), application );
+ }
+ }
+ }
+
+ public Collection<Provider> getProviders() {
+ return providerList;
+ }
+
+ public Provider getProvider( String role, String name ) {
+ Provider provider = null;
+ Map<String,Provider> nameMap = providerMap.get( role );
+ if( nameMap != null) {
+ if( name != null ) {
+ provider = nameMap.get( name );
+ }
+ else {
+ provider = (Provider) nameMap.values().toArray()[0];
+ }
+ }
+ return provider;
+ }
+
+ public void addProvider( Provider provider ) {
+ providerList.add( provider );
+ String role = provider.getRole();
+ Map<String,Provider> nameMap = providerMap.get( role );
+ if( nameMap == null ) {
+ nameMap = new HashMap<>();
+ providerMap.put( role, nameMap );
+ }
+ nameMap.put( provider.getName(), provider );
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-spi/src/main/resources/org/apache/knox/gateway/topology/topology_binding-xml.xml
----------------------------------------------------------------------
diff --cc gateway-spi/src/main/resources/org/apache/knox/gateway/topology/topology_binding-xml.xml
index 9e9c26f,0000000..956387e
mode 100644,000000..100644
--- a/gateway-spi/src/main/resources/org/apache/knox/gateway/topology/topology_binding-xml.xml
+++ b/gateway-spi/src/main/resources/org/apache/knox/gateway/topology/topology_binding-xml.xml
@@@ -1,63 -1,0 +1,64 @@@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+ distributed with this work for additional information
+ regarding copyright ownership. The ASF licenses this file
+ to you under the Apache License, Version 2.0 (the
+ "License"); you may not use this file except in compliance
+ with the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<xml-bindings
+ xmlns="http://www.eclipse.org/eclipselink/xsds/persistence/oxm"
+ package-name="org.apache.knox.gateway.topology"
+ xml-mapping-metadata-complete="true">
+ <xml-schema
+ element-form-default="QUALIFIED"/>
+ <java-types>
+ <java-type name="Topology" xml-accessor-type="NONE">
- <xml-type prop-order="name providers services applications"/>
++ <xml-type prop-order="name defaultServicePath providers services applications"/>
+ <xml-root-element/>
+ <java-attributes>
+ <xml-element java-attribute="name" name="name"/>
++ <xml-element java-attribute="defaultServicePath" name="path"/>
+ <xml-elements java-attribute="providers">
+ <xml-element name="provider"/>
+ <xml-element-wrapper name="gateway"/>
+ </xml-elements>
+ <xml-element java-attribute="services" name="service"/>
+ <xml-element java-attribute="applications" name="application"/>
+ </java-attributes>
+ </java-type>
+ <java-type name="Provider" xml-accessor-type="NONE">
+ <java-attributes>
+ <xml-element java-attribute="name" name="name"/>
+ <xml-element java-attribute="enabled" name="enabled"/>
+ <xml-element java-attribute="role" name="role"/>
+ <xml-element java-attribute="paramsList" name="param"/>
+ </java-attributes>
+ </java-type>
+ <java-type name="Service" xml-accessor-type="NONE">
+ <java-attributes>
+ <xml-element java-attribute="name" name="name"/>
+ <xml-element java-attribute="role" name="role"/>
+ <xml-element java-attribute="urls" name="url"/>
+ <xml-element java-attribute="paramsList" name="param"/>
+ </java-attributes>
+ </java-type>
+ <java-type name="Application" xml-accessor-type="NONE"/>
+ <java-type name="Param" xml-accessor-type="NONE">
+ <java-attributes>
+ <xml-element java-attribute="name"/>
+ <xml-element java-attribute="value"/>
+ </java-attributes>
+ </java-type>
+ </java-types>
- </xml-bindings>
++</xml-bindings>
[19/25] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
index cd2c0eb,0000000..61c5303
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayMessages.java
@@@ -1,521 -1,0 +1,553 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+import org.apache.knox.gateway.services.security.KeystoreServiceException;
+
+import java.io.File;
+import java.net.URI;
+import java.util.Date;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ *
+ */
+@Messages(logger="org.apache.knox.gateway")
+public interface GatewayMessages {
+
+ @Message( level = MessageLevel.FATAL, text = "Failed to parse command line: {0}" )
+ void failedToParseCommandLine( @StackTrace( level = MessageLevel.DEBUG ) ParseException e );
+
+ @Message( level = MessageLevel.INFO, text = "Starting gateway..." )
+ void startingGateway();
+
+ @Message( level = MessageLevel.FATAL, text = "Failed to start gateway: {0}" )
+ void failedToStartGateway( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.INFO, text = "Started gateway on port {0}." )
+ void startedGateway( int port );
+
+ @Message( level = MessageLevel.INFO, text = "Stopping gateway..." )
+ void stoppingGateway();
+
+ @Message( level = MessageLevel.INFO, text = "Stopped gateway." )
+ void stoppedGateway();
+
+ @Message( level = MessageLevel.INFO, text = "Loading configuration resource {0}" )
+ void loadingConfigurationResource( String res );
+
+ @Message( level = MessageLevel.INFO, text = "Loading configuration file {0}" )
+ void loadingConfigurationFile( String file );
+
+ @Message( level = MessageLevel.WARN, text = "Failed to load configuration file {0}: {1}" )
+ void failedToLoadConfig( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.INFO, text = "Using {1} as GATEWAY_HOME via {0}." )
+ void settingGatewayHomeDir( String location, String home );
+
+ @Message( level = MessageLevel.INFO, text = "Loading topologies from directory: {0}" )
+ void loadingTopologiesFromDirectory( String topologiesDir );
+
+ @Message( level = MessageLevel.DEBUG, text = "Loading topology file: {0}" )
+ void loadingTopologyFile( String fileName );
+
+ @Message( level = MessageLevel.INFO, text = "Monitoring topologies in directory: {0}" )
+ void monitoringTopologyChangesInDirectory( String topologiesDir );
+
+ @Message( level = MessageLevel.INFO, text = "Deploying topology {0} to {1}" )
+ void deployingTopology( String clusterName, String warDirName );
+
+ @Message( level = MessageLevel.DEBUG, text = "Deployed topology {0}." )
+ void deployedTopology( String clusterName );
+
+ @Message( level = MessageLevel.INFO, text = "Loading topology {0} from {1}" )
+ void redeployingTopology( String clusterName, String warDirName );
+
+ @Message( level = MessageLevel.DEBUG, text = "Redeployed topology {0}." )
+ void redeployedTopology( String clusterName );
+
+ @Message( level = MessageLevel.INFO, text = "Activating topology {0}" )
+ void activatingTopology( String name );
+
+ @Message( level = MessageLevel.INFO, text = "Activating topology {0} archive {1}" )
+ void activatingTopologyArchive( String topology, String archive );
+
+ @Message( level = MessageLevel.INFO, text = "Deactivating topology {0}" )
+ void deactivatingTopology( String name );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to deploy topology {0}: {1}" )
+ void failedToDeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}" )
+ void failedToRedeployTopology( String name );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topology {0}: {1}" )
+ void failedToRedeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
+
+ @Message(level = MessageLevel.ERROR, text = "Failed to load topology {0}: Topology configuration is invalid!")
+ void failedToLoadTopology(String fileName);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to redeploy topologies: {0}" )
+ void failedToRedeployTopologies( @StackTrace(level=MessageLevel.DEBUG) Throwable e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to undeploy topology {0}: {1}" )
+ void failedToUndeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Exception e );
+
+ @Message( level = MessageLevel.INFO, text = "Deleting topology {0}" )
+ void deletingTopology( String topologyName );
+
+ @Message( level = MessageLevel.INFO, text = "Deleting deployed topology {0}" )
+ void deletingDeployment( String warDirName );
+
+ @Message( level = MessageLevel.DEBUG, text = "Purge backups of deployed topology {0}" )
+ void cleanupDeployments( String topologyName );
+
+ @Message( level = MessageLevel.INFO, text = "Deleting backup deployed topology {0}" )
+ void cleanupDeployment( String absolutePath );
+
+ @Message( level = MessageLevel.INFO, text = "Creating gateway home directory: {0}" )
+ void creatingGatewayHomeDir( File homeDir );
+
+ @Message( level = MessageLevel.INFO, text = "Creating gateway deployment directory: {0}" )
+ void creatingGatewayDeploymentDir( File topologiesDir );
+
+ @Message( level = MessageLevel.INFO, text = "Creating default gateway configuration file: {0}" )
+ void creatingDefaultConfigFile( File defaultConfigFile );
+
+ @Message( level = MessageLevel.INFO, text = "Creating sample topology file: {0}" )
+ void creatingDefaultTopologyFile( File defaultConfigFile );
+
+ @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null name: {0}" )
+ void ignoringServiceContributorWithMissingName( String className );
+
+ @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null role: {0}" )
+ void ignoringServiceContributorWithMissingRole( String className );
+
+ @Message( level = MessageLevel.WARN, text = "Ignoring service deployment contributor with invalid null version: {0}" )
+ void ignoringServiceContributorWithMissingVersion( String className );
+
+ @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null name: {0}" )
+ void ignoringProviderContributorWithMissingName( String className );
+
+ @Message( level = MessageLevel.WARN, text = "Ignoring provider deployment contributor with invalid null role: {0}" )
+ void ignoringProviderContributorWithMissingRole( String className );
+
+ @Message( level = MessageLevel.INFO, text = "Loaded logging configuration: {0}" )
+ void loadedLoggingConfig( String fileName );
+
+ @Message( level = MessageLevel.WARN, text = "Failed to load logging configuration: {0}" )
+ void failedToLoadLoggingConfig( String fileName );
+
+ @Message( level = MessageLevel.INFO, text = "Creating credential store for the gateway instance." )
+ void creatingCredentialStoreForGateway();
+
+ @Message( level = MessageLevel.INFO, text = "Credential store for the gateway instance found - no need to create one." )
+ void credentialStoreForGatewayFoundNotCreating();
+
+ @Message( level = MessageLevel.INFO, text = "Creating keystore for the gateway instance." )
+ void creatingKeyStoreForGateway();
+
+ @Message( level = MessageLevel.INFO, text = "Keystore for the gateway instance found - no need to create one." )
+ void keyStoreForGatewayFoundNotCreating();
+
+ @Message( level = MessageLevel.INFO, text = "Creating credential store for the cluster: {0}" )
+ void creatingCredentialStoreForCluster(String clusterName);
+
+ @Message( level = MessageLevel.INFO, text = "Credential store found for the cluster: {0} - no need to create one." )
+ void credentialStoreForClusterFoundNotCreating(String clusterName);
+
+ @Message( level = MessageLevel.DEBUG, text = "Received request: {0} {1}" )
+ void receivedRequest( String method, String uri );
+
+ @Message( level = MessageLevel.DEBUG, text = "Dispatch request: {0} {1}" )
+ void dispatchRequest( String method, URI uri );
+
+ @Message( level = MessageLevel.WARN, text = "Connection exception dispatching request: {0} {1}" )
+ void dispatchServiceConnectionException( URI uri, @StackTrace(level=MessageLevel.WARN) Exception e );
+
+ @Message( level = MessageLevel.DEBUG, text = "Signature verified: {0}" )
+ void signatureVerified( boolean verified );
+
+ @Message( level = MessageLevel.DEBUG, text = "Apache Knox Gateway {0} ({1})" )
+ void gatewayVersionMessage( String version, String hash );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to inject service {0}: {1}" )
+ void failedToInjectService( String serviceName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to finalize contribution: {0}" )
+ void failedToFinalizeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to contribute service [role={1}, name={0}]: {2}" )
+ void failedToContributeService( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to contribute provider [role={1}, name={0}]: {2}" )
+ void failedToContributeProvider( String name, String role, @StackTrace( level = MessageLevel.ERROR ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to initialize contribution: {0}" )
+ void failedToInitializeContribution( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to initialize servlet instance: {0}" )
+ void failedToInitializeServletInstace( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Gateway processing failed: {0}" )
+ void failedToExecuteFilter( @StackTrace( level = MessageLevel.INFO ) Throwable t );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}: {1}")
+ void failedToLoadTopology( String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to load topology {0}, retrying after {1}ms: {2}")
+ void failedToLoadTopologyRetrying( String friendlyURI, String delay, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to handle topology events: {0}" )
+ void failedToHandleTopologyEvents( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to reload topologies: {0}" )
+ void failedToReloadTopologies( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.FATAL, text = "Unsupported encoding: {0}" )
+ void unsupportedEncoding( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to persist master secret: {0}" )
+ void failedToPersistMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to encrypt master secret: {0}" )
+ void failedToEncryptMasterSecret( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to initialize master service from persistent master {0}: {1}" )
+ void failedToInitializeFromPersistentMaster( String masterFileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to encode passphrase: {0}" )
+ void failedToEncodePassphrase( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to verify signature: {0}")
+ void failedToVerifySignature( @StackTrace(level=MessageLevel.DEBUG) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to sign the data: {0}")
+ void failedToSignData( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to decrypt password for cluster {0}: {1}" )
+ void failedToDecryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to encrypt password for cluster {0}: {1}")
+ void failedToEncryptPasswordForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to create keystore [filename={0}, type={1}]: {2}" )
+ void failedToCreateKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to load keystore [filename={0}, type={1}]: {2}" )
+ void failedToLoadKeystore( String fileName, String keyStoreType, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to add key for cluster {0}: {1}" )
+ void failedToAddKeyForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to add credential for cluster {0}: {1}" )
+ void failedToAddCredentialForCluster( String clusterName, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get key for Gateway {0}: {1}" )
+ void failedToGetKeyForGateway( String alias, @StackTrace( level=MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get credential for cluster {0}: {1}" )
+ void failedToGetCredentialForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get key for cluster {0}: {1}" )
+ void failedToGetKeyForCluster( String clusterName, @StackTrace(level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to add self signed certificate for Gateway {0}: {1}" )
+ void failedToAddSeflSignedCertForGateway( String alias, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to generate secret key from password: {0}" )
+ void failedToGenerateKeyFromPassword( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to establish connection to {0}: {1}" )
+ void failedToEstablishConnectionToUrl( String url, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to interpret property \"{0}\": {1}")
+ void failedToInterpretProperty( String property, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to instantiate the internal gateway services." )
+ void failedToInstantiateGatewayServices();
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to serialize map to Json string {0}: {1}" )
+ void failedToSerializeMapToJSON( Map<String, Object> map, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get map from Json string {0}: {1}" )
+ void failedToGetMapFromJsonString( String json, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.DEBUG, text = "Successful Knox->Hadoop SPNegotiation authentication for URL: {0}" )
+ void successfulSPNegoAuthn(String uri);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed Knox->Hadoop SPNegotiation authentication for URL: {0}" )
+ void failedSPNegoAuthn(String uri);
+
+ @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}" )
+ void dispatchResponseStatusCode(int statusCode);
+
+ @Message( level = MessageLevel.DEBUG, text = "Dispatch response status: {0}, Location: {1}" )
+ void dispatchResponseCreatedStatusCode( int statusCode, String location );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to decrypt cipher text for cluster {0}: due to inability to retrieve the password." )
+ void failedToDecryptCipherForClusterNullPassword(String clusterName);
+
+ @Message( level = MessageLevel.DEBUG, text = "Gateway services have not been initialized." )
+ void gatewayServicesNotInitialized();
+
+ @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is issued to hostname: {0}." )
+ void certificateHostNameForGateway(String cn);
+
+ @Message( level = MessageLevel.INFO, text = "The Gateway SSL certificate is valid between: {0} and {1}." )
+ void certificateValidityPeriod(Date notBefore, Date notAfter);
+
+ @Message( level = MessageLevel.ERROR, text = "Unable to retrieve certificate for Gateway: {0}." )
+ void unableToRetrieveCertificateForGateway(Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to generate alias for cluster: {0} {1}." )
+ void failedToGenerateAliasForCluster(String clusterName, KeystoreServiceException e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Key passphrase not found in credential store - using master secret." )
+ void assumingKeyPassphraseIsMaster();
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to remove alias for cluster: {0} {1}." )
+ void failedToRemoveCredentialForCluster(String clusterName, Exception e);
+
+ @Message( level = MessageLevel.WARN, text = "Failed to match path {0}" )
+ void failedToMatchPath( String path );
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to get system ldap connection: {0}" )
+ void failedToGetSystemLdapConnection( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.WARN, text = "Value not found for cluster:{0}, alias: {1}" )
+ void aliasValueNotFound( String cluster, String alias );
+
+ @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using dnTemplate for principal: {1}" )
+ void computedUserDn(String userDn, String principal);
+
+ @Message( level = MessageLevel.DEBUG, text = "Searching from {0} where {1} scope {2}" )
+ void searchBaseFilterScope( String searchBase, String searchFilter, String searchScope );
+
+ @Message( level = MessageLevel.INFO, text = "Computed userDn: {0} using ldapSearch for principal: {1}" )
+ void searchedAndFoundUserDn(String userDn, String principal);
+
+ @Message( level = MessageLevel.INFO, text = "Computed roles/groups: {0} for principal: {1}" )
+ void lookedUpUserRoles(Set<String> roleNames, String userName);
+
+ @Message( level = MessageLevel.DEBUG, text = "Initialize provider: {1}/{0}" )
+ void initializeProvider( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Initialize service: {1}/{0}" )
+ void initializeService( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Contribute provider: {1}/{0}" )
+ void contributeProvider( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Contribute service: {1}/{0}" )
+ void contributeService( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Finalize provider: {1}/{0}" )
+ void finalizeProvider( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Finalize service: {1}/{0}" )
+ void finalizeService( String name, String role );
+
+ @Message( level = MessageLevel.DEBUG, text = "Configured services directory is {0}" )
+ void usingServicesDirectory(String path);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall service definition file {0} file : {1}" )
+ void failedToLoadServiceDefinition(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to find service definition file {0} file : {1}" )
+ void failedToFindServiceDefinitionFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to find rewrite file {0} file : {1}" )
+ void failedToFindRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.ERROR, text = "Failed to unmarshall rewrite file {0} file : {1}" )
+ void failedToLoadRewriteFile(String fileName, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "No rewrite file found in service directory {0}" )
+ void noRewriteFileFound(String path);
+
+ @Message( level = MessageLevel.DEBUG, text = "Added Service definition name: {0}, role : {1}, version : {2}" )
+ void addedServiceDefinition(String serviceName, String serviceRole, String version);
+
+ @Message( level = MessageLevel.INFO, text = "System Property: {0}={1}" )
+ void logSysProp( String name, String property );
+
+ @Message( level = MessageLevel.ERROR, text = "Unable to get password: {0}" )
+ void unableToGetPassword(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Initialize application: {0}" )
+ void initializeApplication( String name );
+
+ @Message( level = MessageLevel.DEBUG, text = "Contribute application: {0}" )
+ void contributeApplication( String name );
+
+ @Message( level = MessageLevel.DEBUG, text = "Finalize application: {0}" )
+ void finalizeApplication( String name );
+
+ @Message( level = MessageLevel.INFO, text = "Default topology {0} at {1}" )
+ void defaultTopologySetup( String defaultTopologyName, String redirectContext );
+
+ @Message( level = MessageLevel.DEBUG, text = "Default topology forward from {0} to {1}" )
+ void defaultTopologyForward( String oldTarget, String newTarget );
+
+ @Message( level = MessageLevel.ERROR, text = "Unable to setup PagedResults" )
+ void unableToSetupPagedResults();
+
+ @Message( level = MessageLevel.INFO, text = "Ignoring PartialResultException" )
+ void ignoringPartialResultException();
+
+ @Message( level = MessageLevel.WARN, text = "Only retrieved first {0} groups due to SizeLimitExceededException." )
+ void sizeLimitExceededOnlyRetrieved(int numResults);
+
+ @Message( level = MessageLevel.DEBUG, text = "Failed to parse path into Template: {0} : {1}" )
+ void failedToParsePath( String path, @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message( level = MessageLevel.DEBUG, text = "Failed to initialize metrics reporter {0} : {1}" )
+ void failedToInitializeReporter( String name, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Failed to start metrics reporter {0} : {1}" )
+ void failedToStartReporter( String name, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Failed to stop metrics reporter {0} : {1}" )
+ void failedToStopReporter( String name, @StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.INFO, text = "Cookie scoping feature enabled: {0}" )
+ void cookieScopingFeatureEnabled( boolean enabled );
+
+ /**
+ * Log whether Topology port mapping feature is enabled/disabled.
+ *
+ * @param enabled
+ */
+ @Message(level = MessageLevel.INFO,
+ text = "Topology port mapping feature enabled: {0}")
+ void gatewayTopologyPortMappingEnabled(final boolean enabled);
+
+ /**
+ * @param topology
+ * @param port
+ */
+ @Message(level = MessageLevel.DEBUG,
+ text = "Creating a connector for topology {0} listening on port {1}.")
+ void createJettyConnector(final String topology, final int port);
+
+ /**
+ * @param topology
+ */
+ @Message(level = MessageLevel.DEBUG,
+ text = "Creating a handler for topology {0}.")
+ void createJettyHandler(final String topology);
+
+ /**
+ * @param oldTarget
+ * @param newTarget
+ */
+ @Message(level = MessageLevel.INFO,
+ text = "Updating request context from {0} to {1}")
+ void topologyPortMappingAddContext(final String oldTarget,
+ final String newTarget);
+
+ /**
+ * @param oldTarget
+ * @param newTarget
+ */
+ @Message(level = MessageLevel.DEBUG,
+ text = "Updating request target from {0} to {1}")
+ void topologyPortMappingUpdateRequest(final String oldTarget,
+ final String newTarget);
+
+ /**
+ * Messages for Topology Port Mapping
+ *
+ * @param port
+ * @param topology
+ */
+ @Message(level = MessageLevel.ERROR,
+ text = "Port {0} configured for Topology - {1} is already in use.")
+ void portAlreadyInUse(final int port, final String topology);
+
+ /**
+ * Messages for Topology Port Mapping
+ *
+ * @param port
+ */
+ @Message(level = MessageLevel.ERROR,
+ text = "Port {0} is already in use.")
+ void portAlreadyInUse(final int port);
+
+ /**
+ * Log topology and port
+ *
+ * @param topology
+ * @param port
+ */
+ @Message(level = MessageLevel.INFO,
+ text = "Started gateway, topology \"{0}\" listening on port \"{1}\".")
+ void startedGateway(final String topology, final int port);
+
+ @Message(level = MessageLevel.ERROR,
+ text =
+ " Could not find topology \"{0}\" mapped to port \"{1}\" configured in gateway-config.xml. "
+ + "This invalid topology mapping will be ignored by the gateway. "
+ + "Gateway restart will be required if in the future \"{0}\" topology is added.")
+ void topologyPortMappingCannotFindTopology(final String topology, final int port);
+
+
++ @Message( level = MessageLevel.INFO, text = "Monitoring simple descriptors in directory: {0}" )
++ void monitoringDescriptorChangesInDirectory(String descriptorsDir);
++
++
++ @Message( level = MessageLevel.INFO, text = "Monitoring shared provider configurations in directory: {0}" )
++ void monitoringProviderConfigChangesInDirectory(String sharedProviderDir);
++
++ @Message( level = MessageLevel.INFO, text = "Prevented deletion of shared provider configuration because there are referencing descriptors: {0}" )
++ void preventedDeletionOfSharedProviderConfiguration(String providerConfigurationPath);
++
++ @Message( level = MessageLevel.INFO, text = "Generated topology {0} because the associated descriptor {1} changed." )
++ void generatedTopologyForDescriptorChange(String topologyName, String descriptorName);
++
+ @Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
+ void simpleDescriptorHandlingError(final String simpleDesc,
- @StackTrace( level = MessageLevel.DEBUG ) Exception e );
++ @StackTrace(level = MessageLevel.DEBUG) Exception e);
++
++ @Message(level = MessageLevel.DEBUG, text = "Successfully wrote configuration: {0}")
++ void wroteConfigurationFile(final String filePath);
++
++ @Message(level = MessageLevel.ERROR, text = "Failed to write configuration: {0}")
++ void failedToWriteConfigurationFile(final String filePath,
++ @StackTrace(level = MessageLevel.DEBUG) Exception e );
++
++ @Message( level = MessageLevel.INFO, text = "Deleting topology {0} because the associated descriptor {1} was deleted." )
++ void deletingTopologyForDescriptorDeletion(String topologyName, String descriptorName);
++
++ @Message( level = MessageLevel.INFO, text = "Deleting descriptor {0} because the associated topology {1} was deleted." )
++ void deletingDescriptorForTopologyDeletion(String descriptorName, String topologyName);
++
++ @Message( level = MessageLevel.DEBUG, text = "Added descriptor {0} reference to provider configuration {1}." )
++ void addedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
++
++ @Message( level = MessageLevel.DEBUG, text = "Removed descriptor {0} reference to provider configuration {1}." )
++ void removedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
index dfe34d4,0000000..c7b8df5
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/config/impl/GatewayConfigImpl.java
@@@ -1,925 -1,0 +1,926 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.config.impl;
+
++import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.joda.time.Period;
+import org.joda.time.format.PeriodFormatter;
+import org.joda.time.format.PeriodFormatterBuilder;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * The configuration for the Gateway.
+ *
+ * The Gateway configuration variables are described in gateway-default.xml
+ *
+ * The Gateway specific configuration is split into two layers:
+ *
+ * 1. gateway-default.xml - All the configuration variables that the
+ * Gateway needs. These are the defaults that ship with the app
+ * and should only be changed by the app developers.
+ *
+ * 2. gateway-site.xml - The (possibly empty) configuration that the
+ * system administrator can set variables for their Hadoop cluster.
+ *
+ * To find the gateway configuration files the following process is used.
+ * First, if the GATEWAY_HOME system property contains a valid directory name,
+ * an attempt will be made to read the configuration files from that directory.
+ * Second, if the GATEWAY_HOME environment variable contains a valid directory name,
+ * an attempt will be made to read the configuration files from that directory.
+ * Third, an attempt will be made to load the configuration files from the directory
+ * specified via the "user.dir" system property.
+ * Fourth, an attempt will be made to load the configuration files from the classpath.
+ * Last, defaults will be used for all values.
+ *
+ * If GATEWAY_HOME isn't set via either the system property or environment variable then
+ * a value for this will be defaulted. The default selected will be the directory that
+ * contained the last loaded configuration file that was not contained in a JAR. If
+ * no such configuration file is loaded the value of the "user.dir" system property will be used
+ * as the value of GATEWAY_HOME. This is important to consider for any relative file names as they
+ * will be resolved relative to the value of GATEWAY_HOME. One such relative value is the
+ * name of the directory containing cluster topologies. This value defaults to "clusters".
+ */
+public class GatewayConfigImpl extends Configuration implements GatewayConfig {
+
+ private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM = "default.app.topology.name";
+ private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME = null;
+
+ private static final GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
+
+ private static final String GATEWAY_CONFIG_DIR_PREFIX = "conf";
+
+ private static final String GATEWAY_CONFIG_FILE_PREFIX = "gateway";
+
+ private static final String DEFAULT_STACKS_SERVICES_DIR = "services";
+
+ private static final String DEFAULT_APPLICATIONS_DIR = "applications";
+
+ public static final String[] GATEWAY_CONFIG_FILENAMES = {
+ GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-default.xml",
+ GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-site.xml"
+ };
+
+// private static final String[] HADOOP_CONF_FILENAMES = {
+// "core-default.xml",
+// "core-site.xml"
+//// "hdfs-default.xml",
+//// "hdfs-site.xml",
+//// "mapred-default.xml",
+//// "mapred-site.xml"
+// };
+
+// private static final String[] HADOOP_PREFIX_VARS = {
+// "HADOOP_PREFIX",
+// "HADOOP_HOME"
+// };
+
+ public static final String HTTP_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".host";
+ public static final String HTTP_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".port";
+ public static final String HTTP_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".path";
+ public static final String DEPLOYMENT_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.dir";
+ public static final String SECURITY_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".security.dir";
+ public static final String DATA_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".data.dir";
+ public static final String STACKS_SERVICES_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".services.dir";
+ public static final String GLOBAL_RULES_SERVICES = GATEWAY_CONFIG_FILE_PREFIX + ".global.rules.services";
+ public static final String APPLICATIONS_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".applications.dir";
+ public static final String HADOOP_CONF_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".hadoop.conf.dir";
+ public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
+ private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
+ private static final String CLIENT_AUTH_NEEDED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.needed";
+ private static final String CLIENT_AUTH_WANTED = GATEWAY_CONFIG_FILE_PREFIX + ".client.auth.wanted";
+ private static final String TRUSTSTORE_PATH = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.path";
+ private static final String TRUSTSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".truststore.type";
+ private static final String KEYSTORE_TYPE = GATEWAY_CONFIG_FILE_PREFIX + ".keystore.type";
+ private static final String XFORWARDED_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".xforwarded.enabled";
+ private static final String EPHEMERAL_DH_KEY_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".jdk.tls.ephemeralDHKeySize";
+ private static final String HTTP_CLIENT_MAX_CONNECTION = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.maxConnections";
+ private static final String HTTP_CLIENT_CONNECTION_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.connectionTimeout";
+ private static final String HTTP_CLIENT_SOCKET_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".httpclient.socketTimeout";
+ private static final String THREAD_POOL_MAX = GATEWAY_CONFIG_FILE_PREFIX + ".threadpool.max";
+ public static final String HTTP_SERVER_REQUEST_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestBuffer";
+ public static final String HTTP_SERVER_REQUEST_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestHeaderBuffer";
+ public static final String HTTP_SERVER_RESPONSE_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseBuffer";
+ public static final String HTTP_SERVER_RESPONSE_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseHeaderBuffer";
+ public static final String DEPLOYMENTS_BACKUP_VERSION_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.versionLimit";
+ public static final String DEPLOYMENTS_BACKUP_AGE_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.ageLimit";
+ public static final String METRICS_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".metrics.enabled";
+ public static final String JMX_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".jmx.metrics.reporting.enabled";
+ public static final String GRAPHITE_METRICS_REPORTING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.enabled";
+ public static final String GRAPHITE_METRICS_REPORTING_HOST = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.host";
+ public static final String GRAPHITE_METRICS_REPORTING_PORT = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.port";
+ public static final String GRAPHITE_METRICS_REPORTING_FREQUENCY = GATEWAY_CONFIG_FILE_PREFIX + ".graphite.metrics.reporting.frequency";
+ public static final String GATEWAY_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".idle.timeout";
+ public static final String REMOTE_IP_HEADER_NAME = GATEWAY_CONFIG_FILE_PREFIX + ".remote.ip.header.name";
+
+ /* @since 0.10 Websocket config variables */
+ public static final String WEBSOCKET_FEATURE_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.feature.enabled";
+ public static final String WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.size";
+ public static final String WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.size";
+ public static final String WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.text.buffer.size";
+ public static final String WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.max.binary.buffer.size";
+ public static final String WEBSOCKET_INPUT_BUFFER_SIZE = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.input.buffer.size";
+ public static final String WEBSOCKET_ASYNC_WRITE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.async.write.timeout";
+ public static final String WEBSOCKET_IDLE_TIMEOUT = GATEWAY_CONFIG_FILE_PREFIX + ".websocket.idle.timeout";
+
+ /**
+ * Properties for the gateway port mapping feature
+ */
+ public static final String GATEWAY_PORT_MAPPING_PREFIX = GATEWAY_CONFIG_FILE_PREFIX + ".port.mapping.";
+ public static final String GATEWAY_PORT_MAPPING_REGEX = GATEWAY_CONFIG_FILE_PREFIX + "\\.port\\.mapping\\..*";
+ public static final String GATEWAY_PORT_MAPPING_ENABLED = GATEWAY_PORT_MAPPING_PREFIX + "enabled";
+
+ /**
+ * Comma separated list of MIME Types to be compressed by Knox on the way out.
+ *
+ * @since 0.12
+ */
+ public static final String MIME_TYPES_TO_COMPRESS = GATEWAY_CONFIG_FILE_PREFIX
+ + ".gzip.compress.mime.types";
+
+ // These config property names are not inline with the convention of using the
+ // GATEWAY_CONFIG_FILE_PREFIX as is done by those above. These are left for
+ // backward compatibility.
+ // LET'S NOT CONTINUE THIS PATTERN BUT LEAVE THEM FOR NOW.
+ private static final String SSL_ENABLED = "ssl.enabled";
+ private static final String SSL_EXCLUDE_PROTOCOLS = "ssl.exclude.protocols";
+ private static final String SSL_INCLUDE_CIPHERS = "ssl.include.ciphers";
+ private static final String SSL_EXCLUDE_CIPHERS = "ssl.exclude.ciphers";
+ // END BACKWARD COMPATIBLE BLOCK
+
+ public static final String DEFAULT_HTTP_PORT = "8888";
+ public static final String DEFAULT_HTTP_PATH = "gateway";
+ public static final String DEFAULT_DEPLOYMENT_DIR = "deployments";
+ public static final String DEFAULT_SECURITY_DIR = "security";
+ public static final String DEFAULT_DATA_DIR = "data";
+
+ /* Websocket defaults */
+ public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED = false;
+ public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE = Integer.MAX_VALUE;;
+ public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE = Integer.MAX_VALUE;;
+ public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE = 32768;
+ public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE = 32768;
+ public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE = 4096;
+ public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT = 60000;
+ public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT = 300000;
+
+ public static final boolean DEFAULT_GATEWAY_PORT_MAPPING_ENABLED = true;
+
+ /**
+ * Default list of MIME Types to be compressed.
+ * @since 0.12
+ */
+ public static final String DEFAULT_MIME_TYPES_TO_COMPRESS = "text/html, text/plain, text/xml, text/css, "
+ + "application/javascript, application/x-javascript, text/javascript";
+
+ public static final String COOKIE_SCOPING_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".scope.cookies.feature.enabled";
+ public static final boolean DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED = false;
+ private static final String CRYPTO_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.algorithm";
+ private static final String CRYPTO_PBE_ALGORITHM = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.pbe.algorithm";
+ private static final String CRYPTO_TRANSFORMATION = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.transformation";
+ private static final String CRYPTO_SALTSIZE = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.salt.size";
+ private static final String CRYPTO_ITERATION_COUNT = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.iteration.count";
+ private static final String CRYPTO_KEY_LENGTH = GATEWAY_CONFIG_FILE_PREFIX + ".crypto.key.length";
+ public static final String SERVER_HEADER_ENABLED = GATEWAY_CONFIG_FILE_PREFIX + ".server.header.enabled";
+
+ private static List<String> DEFAULT_GLOBAL_RULES_SERVICES;
+
+
+ public GatewayConfigImpl() {
+ init();
+ }
+
+ private String getVar( String variableName, String defaultValue ) {
+ String value = get( variableName );
+ if( value == null ) {
+ value = System.getProperty( variableName );
+ }
+ if( value == null ) {
+ value = System.getenv( variableName );
+ }
+ if( value == null ) {
+ value = defaultValue;
+ }
+ return value;
+ }
+
+ private String getGatewayHomeDir() {
+ String home = get(
+ GATEWAY_HOME_VAR,
+ System.getProperty(
+ GATEWAY_HOME_VAR,
+ System.getenv( GATEWAY_HOME_VAR ) ) );
+ return home;
+ }
+
+ private void setGatewayHomeDir( String dir ) {
+ set( GATEWAY_HOME_VAR, dir );
+ }
+
+ @Override
+ public String getGatewayConfDir() {
+ String value = getVar( GATEWAY_CONF_HOME_VAR, getGatewayHomeDir() + File.separator + "conf" );
- return value;
++ return FilenameUtils.normalize(value);
+ }
+
+ @Override
+ public String getGatewayDataDir() {
+ String systemValue =
+ System.getProperty(GATEWAY_DATA_HOME_VAR, System.getenv(GATEWAY_DATA_HOME_VAR));
+ String dataDir = null;
+ if (systemValue != null) {
+ dataDir = systemValue;
+ } else {
+ dataDir = get(DATA_DIR, getGatewayHomeDir() + File.separator + DEFAULT_DATA_DIR);
+ }
+ return dataDir;
+ }
+
+ @Override
+ public String getGatewayServicesDir() {
+ return get(STACKS_SERVICES_DIR, getGatewayDataDir() + File.separator + DEFAULT_STACKS_SERVICES_DIR);
+ }
+
+ @Override
+ public String getGatewayApplicationsDir() {
+ return get(APPLICATIONS_DIR, getGatewayDataDir() + File.separator + DEFAULT_APPLICATIONS_DIR);
+ }
+
+ @Override
+ public String getHadoopConfDir() {
+ return get( HADOOP_CONF_DIR );
+ }
+
+ private void init() {
+ // Load environment variables.
+ for( Map.Entry<String, String> e : System.getenv().entrySet() ) {
+ set( "env." + e.getKey(), e.getValue() );
+ }
+ // Load system properties.
+ for( Map.Entry<Object, Object> p : System.getProperties().entrySet() ) {
+ set( "sys." + p.getKey().toString(), p.getValue().toString() );
+ }
+
+ URL lastFileUrl = null;
+ for( String fileName : GATEWAY_CONFIG_FILENAMES ) {
+ lastFileUrl = loadConfig( fileName, lastFileUrl );
+ }
+ //set default services list
+ setDefaultGlobalRulesServices();
+
+ initGatewayHomeDir( lastFileUrl );
+
+ // log whether the scoping cookies to the gateway.path feature is enabled
+ log.cookieScopingFeatureEnabled(isCookieScopingToPathEnabled());
+ }
+
+ private void setDefaultGlobalRulesServices() {
+ DEFAULT_GLOBAL_RULES_SERVICES = new ArrayList<>();
+ DEFAULT_GLOBAL_RULES_SERVICES.add("NAMENODE");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("JOBTRACKER");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHDFS");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHCAT");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("OOZIE");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("WEBHBASE");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("HIVE");
+ DEFAULT_GLOBAL_RULES_SERVICES.add("RESOURCEMANAGER");
+ }
+
+ private void initGatewayHomeDir( URL lastFileUrl ) {
+ String home = System.getProperty( GATEWAY_HOME_VAR );
+ if( home != null ) {
+ set( GATEWAY_HOME_VAR, home );
+ log.settingGatewayHomeDir( "system property", home );
+ return;
+ }
+ home = System.getenv( GATEWAY_HOME_VAR );
+ if( home != null ) {
+ set( GATEWAY_HOME_VAR, home );
+ log.settingGatewayHomeDir( "environment variable", home );
+ return;
+ }
+ if( lastFileUrl != null ) {
+ File file = new File( lastFileUrl.getFile() ).getAbsoluteFile();
+ File dir = file.getParentFile().getParentFile(); // Move up two levels to get to parent of conf.
+ if( dir.exists() && dir.canRead() )
+ home = dir.getAbsolutePath();
+ set( GATEWAY_HOME_VAR, home );
+ log.settingGatewayHomeDir( "configuration file location", home );
+ return;
+ }
+ home = System.getProperty( "user.dir" );
+ if( home != null ) {
+ set( GATEWAY_HOME_VAR, home );
+ log.settingGatewayHomeDir( "user.dir system property", home );
+ return;
+ }
+ }
+
+ // 1. GATEWAY_HOME system property
+ // 2. GATEWAY_HOME environment variable
+ // 3. user.dir system property
+ // 4. class path
+ private URL loadConfig( String fileName, URL lastFileUrl ) {
+ lastFileUrl = loadConfigFile( System.getProperty( GATEWAY_HOME_VAR ), fileName );
+ if( lastFileUrl == null ) {
+ lastFileUrl = loadConfigFile( System.getenv( GATEWAY_HOME_VAR ), fileName );
+ }
+ if( lastFileUrl == null ) {
+ lastFileUrl = loadConfigFile( System.getProperty( "user.dir" ), fileName );
+ }
+ if( lastFileUrl == null ) {
+ lastFileUrl = loadConfigResource( fileName );
+ }
+ if( lastFileUrl != null && !"file".equals( lastFileUrl.getProtocol() ) ) {
+ lastFileUrl = null;
+ }
+ return lastFileUrl;
+ }
+
+ private URL loadConfigFile( String dir, String file ) {
+ URL url = null;
+ if( dir != null ) {
+ File f = new File( dir, file );
+ if( f.exists() ) {
+ String path = f.getAbsolutePath();
+ try {
+ url = f.toURI().toURL();
+ addResource( new Path( path ) );
+ log.loadingConfigurationFile( path );
+ } catch ( MalformedURLException e ) {
+ log.failedToLoadConfig( path, e );
+ }
+ }
+ }
+ return url;
+ }
+
+ private URL loadConfigResource( String file ) {
+ URL url = getResource( file );
+ if( url != null ) {
+ log.loadingConfigurationResource( url.toExternalForm() );
+ addResource( url );
+ }
+ return url;
+ }
+
+ @Override
+ public String getGatewayHost() {
+ String host = get( HTTP_HOST, "0.0.0.0" );
+ return host;
+ }
+
+ @Override
+ public int getGatewayPort() {
+ return Integer.parseInt( get( HTTP_PORT, DEFAULT_HTTP_PORT ) );
+ }
+
+ @Override
+ public String getGatewayPath() {
+ return get( HTTP_PATH, DEFAULT_HTTP_PATH );
+ }
+
+ @Override
+ public String getGatewayTopologyDir() {
+ return getGatewayConfDir() + File.separator + "topologies";
+ }
+
+ @Override
+ public String getGatewayDeploymentDir() {
+ return get(DEPLOYMENT_DIR, getGatewayDataDir() + File.separator + DEFAULT_DEPLOYMENT_DIR);
+ }
+
+ @Override
+ public String getGatewaySecurityDir() {
+ return get(SECURITY_DIR, getGatewayDataDir() + File.separator + DEFAULT_SECURITY_DIR);
+ }
+
+ @Override
+ public InetSocketAddress getGatewayAddress() throws UnknownHostException {
+ String host = getGatewayHost();
+ int port = getGatewayPort();
+ InetSocketAddress address = new InetSocketAddress( host, port );
+ return address;
+ }
+
+ @Override
+ public boolean isSSLEnabled() {
+ String enabled = get( SSL_ENABLED, "true" );
+
+ return "true".equals(enabled);
+ }
+
+ @Override
+ public boolean isHadoopKerberosSecured() {
+ String hadoopKerberosSecured = get( HADOOP_KERBEROS_SECURED, "false" );
+ return "true".equals(hadoopKerberosSecured);
+ }
+
+ @Override
+ public String getKerberosConfig() {
+ return get( KRB5_CONFIG ) ;
+ }
+
+ @Override
+ public boolean isKerberosDebugEnabled() {
+ String kerberosDebugEnabled = get( KRB5_DEBUG, "false" );
+ return "true".equals(kerberosDebugEnabled);
+ }
+
+ @Override
+ public String getKerberosLoginConfig() {
+ return get( KRB5_LOGIN_CONFIG );
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getDefaultTopologyName()
+ */
+ @Override
+ public String getDefaultTopologyName() {
+ String name = get(GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM);
+ return name != null ? name : GATEWAY_DEFAULT_TOPOLOGY_NAME;
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getDefaultAppRedirectPath()
+ */
+ @Override
+ public String getDefaultAppRedirectPath() {
+ String defTopo = getDefaultTopologyName();
+ if( defTopo == null ) {
+ return null;
+ } else {
+ return "/" + getGatewayPath() + "/" + defTopo;
+ }
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getFrontendUrl()
+ */
+ @Override
+ public String getFrontendUrl() {
+ String url = get( FRONTEND_URL, null );
+ return url;
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getExcludedSSLProtocols()
+ */
+ @Override
+ public List<String> getExcludedSSLProtocols() {
+ List<String> protocols = null;
+ String value = get(SSL_EXCLUDE_PROTOCOLS);
+ if (!"none".equals(value)) {
+ protocols = Arrays.asList(value.split("\\s*,\\s*"));
+ }
+ return protocols;
+ }
+
+ @Override
+ public List<String> getIncludedSSLCiphers() {
+ List<String> list = null;
+ String value = get(SSL_INCLUDE_CIPHERS);
+ if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
+ list = Arrays.asList(value.trim().split("\\s*,\\s*"));
+ }
+ return list;
+ }
+
+ @Override
+ public List<String> getExcludedSSLCiphers() {
+ List<String> list = null;
+ String value = get(SSL_EXCLUDE_CIPHERS);
+ if (value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim())) {
+ list = Arrays.asList(value.trim().split("\\s*,\\s*"));
+ }
+ return list;
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#isClientAuthNeeded()
+ */
+ @Override
+ public boolean isClientAuthNeeded() {
+ String clientAuthNeeded = get( CLIENT_AUTH_NEEDED, "false" );
+ return "true".equals(clientAuthNeeded);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.knox.gateway.config.GatewayConfig#isClientAuthWanted()
+ */
+ @Override
+ public boolean isClientAuthWanted() {
+ String clientAuthWanted = get( CLIENT_AUTH_WANTED, "false" );
+ return "true".equals(clientAuthWanted);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getTruststorePath()
+ */
+ @Override
+ public String getTruststorePath() {
+ return get( TRUSTSTORE_PATH, null);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getTrustAllCerts()
+ */
+ @Override
+ public boolean getTrustAllCerts() {
+ String trustAllCerts = get( TRUST_ALL_CERTS, "false" );
+ return "true".equals(trustAllCerts);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getTruststorePath()
+ */
+ @Override
+ public String getTruststoreType() {
+ return get( TRUSTSTORE_TYPE, "JKS");
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getTruststorePath()
+ */
+ @Override
+ public String getKeystoreType() {
+ return get( KEYSTORE_TYPE, "JKS");
+ }
+
+ @Override
+ public boolean isXForwardedEnabled() {
+ String xForwardedEnabled = get( XFORWARDED_ENABLED, "true" );
+ return "true".equals(xForwardedEnabled);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getEphemeralDHKeySize()
+ */
+ @Override
+ public String getEphemeralDHKeySize() {
+ return get( EPHEMERAL_DH_KEY_SIZE, "2048");
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getHttpClientMaxConnections()
+ */
+ @Override
+ public int getHttpClientMaxConnections() {
+ return getInt( HTTP_CLIENT_MAX_CONNECTION, 32 );
+ }
+
+ @Override
+ public int getHttpClientConnectionTimeout() {
+ int t = -1;
+ String s = get( HTTP_CLIENT_CONNECTION_TIMEOUT, null );
+ if ( s != null ) {
+ try {
+ t = (int)parseNetworkTimeout( s );
+ } catch ( Exception e ) {
+ // Ignore it and use the default.
+ }
+ }
+ return t;
+ }
+
+ @Override
+ public int getHttpClientSocketTimeout() {
+ int t = -1;
+ String s = get( HTTP_CLIENT_SOCKET_TIMEOUT, null );
+ if ( s != null ) {
+ try {
+ t = (int)parseNetworkTimeout( s );
+ } catch ( Exception e ) {
+ // Ignore it and use the default.
+ }
+ }
+ return t;
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#getThreadPoolMax()
+ */
+ @Override
+ public int getThreadPoolMax() {
+ int i = getInt( THREAD_POOL_MAX, 254 );
+ // Testing has shown that a value lower than 5 prevents Jetty from servicing request.
+ if( i < 5 ) {
+ i = 5;
+ }
+ return i;
+ }
+
+ @Override
+ public int getHttpServerRequestBuffer() {
+ int i = getInt( HTTP_SERVER_REQUEST_BUFFER, 16 * 1024 );
+ return i;
+ }
+
+ @Override
+ public int getHttpServerRequestHeaderBuffer() {
+ int i = getInt( HTTP_SERVER_REQUEST_HEADER_BUFFER, 8 * 1024 );
+ return i;
+ }
+
+ @Override
+ public int getHttpServerResponseBuffer() {
+ int i = getInt( HTTP_SERVER_RESPONSE_BUFFER, 32 * 1024 );
+ return i;
+ }
+
+ @Override
+ public int getHttpServerResponseHeaderBuffer() {
+ int i = getInt( HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8 * 1024 );
+ return i;
+ }
+
+ @Override
+ public int getGatewayDeploymentsBackupVersionLimit() {
+ int i = getInt( DEPLOYMENTS_BACKUP_VERSION_LIMIT, 5 );
+ if( i < 0 ) {
+ i = -1;
+ }
+ return i;
+ }
+
+ @Override
+ public long getGatewayIdleTimeout() {
+ return getLong(GATEWAY_IDLE_TIMEOUT, 300000l);
+ }
+
+ @Override
+ public long getGatewayDeploymentsBackupAgeLimit() {
+ PeriodFormatter f = new PeriodFormatterBuilder().appendDays().toFormatter();
+ String s = get( DEPLOYMENTS_BACKUP_AGE_LIMIT, "-1" );
+ long d;
+ try {
+ Period p = Period.parse( s, f );
+ d = p.toStandardDuration().getMillis();
+ if( d < 0 ) {
+ d = -1;
+ }
+ } catch( Exception e ) {
+ d = -1;
+ }
+ return d;
+ }
+
+ @Override
+ public String getSigningKeystoreName() {
+ return get(SIGNING_KEYSTORE_NAME);
+ }
+
+ @Override
+ public String getSigningKeyAlias() {
+ return get(SIGNING_KEY_ALIAS);
+ }
+
+ @Override
+ public List<String> getGlobalRulesServices() {
+ String value = get( GLOBAL_RULES_SERVICES );
+ if ( value != null && !value.isEmpty() && !"none".equalsIgnoreCase(value.trim()) ) {
+ return Arrays.asList( value.trim().split("\\s*,\\s*") );
+ }
+ return DEFAULT_GLOBAL_RULES_SERVICES;
+ }
+
+ @Override
+ public boolean isMetricsEnabled() {
+ String metricsEnabled = get( METRICS_ENABLED, "false" );
+ return "true".equals(metricsEnabled);
+ }
+
+ @Override
+ public boolean isJmxMetricsReportingEnabled() {
+ String enabled = get( JMX_METRICS_REPORTING_ENABLED, "false" );
+ return "true".equals(enabled);
+ }
+
+ @Override
+ public boolean isGraphiteMetricsReportingEnabled() {
+ String enabled = get( GRAPHITE_METRICS_REPORTING_ENABLED, "false" );
+ return "true".equals(enabled);
+ }
+
+ @Override
+ public String getGraphiteHost() {
+ String host = get( GRAPHITE_METRICS_REPORTING_HOST, "localhost" );
+ return host;
+ }
+
+ @Override
+ public int getGraphitePort() {
+ int i = getInt( GRAPHITE_METRICS_REPORTING_PORT, 32772 );
+ return i;
+ }
+
+ @Override
+ public int getGraphiteReportingFrequency() {
+ int i = getInt( GRAPHITE_METRICS_REPORTING_FREQUENCY, 1 );
+ return i;
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#isWebsocketEnabled()
+ */
+ @Override
+ public boolean isWebsocketEnabled() {
+ final String result = get( WEBSOCKET_FEATURE_ENABLED, Boolean.toString(DEFAULT_WEBSOCKET_FEATURE_ENABLED));
+ return Boolean.parseBoolean(result);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketMaxTextMessageSize()
+ */
+ @Override
+ public int getWebsocketMaxTextMessageSize() {
+ return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketMaxBinaryMessageSize()
+ */
+ @Override
+ public int getWebsocketMaxBinaryMessageSize() {
+ return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketMaxTextMessageBufferSize()
+ */
+ @Override
+ public int getWebsocketMaxTextMessageBufferSize() {
+ return getInt( WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketMaxBinaryMessageBufferSize()
+ */
+ @Override
+ public int getWebsocketMaxBinaryMessageBufferSize() {
+ return getInt( WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE, DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketInputBufferSize()
+ */
+ @Override
+ public int getWebsocketInputBufferSize() {
+ return getInt( WEBSOCKET_INPUT_BUFFER_SIZE, DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketAsyncWriteTimeout()
+ */
+ @Override
+ public int getWebsocketAsyncWriteTimeout() {
+ return getInt( WEBSOCKET_ASYNC_WRITE_TIMEOUT, DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT);
+ }
+
+ /* (non-Javadoc)
+ * @see GatewayConfig#websocketIdleTimeout()
+ */
+ @Override
+ public int getWebsocketIdleTimeout() {
+ return getInt( WEBSOCKET_IDLE_TIMEOUT, DEFAULT_WEBSOCKET_IDLE_TIMEOUT);
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * GatewayConfig#getMimeTypesToCompress()
+ */
+ @Override
+ public List<String> getMimeTypesToCompress() {
+ List<String> mimeTypes = null;
+ String value = get(MIME_TYPES_TO_COMPRESS, DEFAULT_MIME_TYPES_TO_COMPRESS);
+ if (value != null && !value.isEmpty()) {
+ mimeTypes = Arrays.asList(value.trim().split("\\s*,\\s*"));
+ }
+ return mimeTypes;
+ }
+
+ /**
+ * Map of Topology names and their ports.
+ *
+ * @return an unmodifiable map of topology names to their configured port numbers
+ */
+ @Override
+ public Map<String, Integer> getGatewayPortMappings() {
+
+ final Map<String, Integer> result = new ConcurrentHashMap<String, Integer>();
+ final Map<String, String> properties = getValByRegex(GATEWAY_PORT_MAPPING_REGEX);
+
+ // Convert port no. from string to int
+ for(final Map.Entry<String, String> e : properties.entrySet()) {
+ // ignore the GATEWAY_PORT_MAPPING_ENABLED property
+ if(!e.getKey().equalsIgnoreCase(GATEWAY_PORT_MAPPING_ENABLED)) {
+ // extract the topology name and use it as a key
+ result.put(StringUtils.substringAfter(e.getKey(), GATEWAY_PORT_MAPPING_PREFIX), Integer.parseInt(e.getValue()) );
+ }
+
+ }
+
+ return Collections.unmodifiableMap(result);
+ }
+
+ /**
+ * Is the Port Mapping feature on ?
+ *
+ * @return true if the gateway port mapping feature is enabled; false otherwise
+ */
+ @Override
+ public boolean isGatewayPortMappingEnabled() {
+ final String result = get( GATEWAY_PORT_MAPPING_ENABLED, Boolean.toString(DEFAULT_GATEWAY_PORT_MAPPING_ENABLED));
+ return Boolean.parseBoolean(result);
+ }
+
+ private static long parseNetworkTimeout(String s ) {
+ PeriodFormatter f = new PeriodFormatterBuilder()
+ .appendMinutes().appendSuffix("m"," min")
+ .appendSeconds().appendSuffix("s"," sec")
+ .appendMillis().toFormatter();
+ Period p = Period.parse( s, f );
+ return p.toStandardDuration().getMillis();
+ }
+
+ @Override
+ public boolean isCookieScopingToPathEnabled() {
+ final boolean result = Boolean.parseBoolean(get(COOKIE_SCOPING_ENABLED,
+ Boolean.toString(DEFAULT_COOKIE_SCOPING_FEATURE_ENABLED)));
+ return result;
+ }
+
+ @Override
+ public String getHeaderNameForRemoteAddress() {
+ String value = getVar(REMOTE_IP_HEADER_NAME, "X-Forwarded-For");
+ return value;
+ }
+
+ @Override
+ public String getAlgorithm() {
+ return getVar(CRYPTO_ALGORITHM, null);
+ }
+
+ @Override
+ public String getPBEAlgorithm() {
+ return getVar(CRYPTO_PBE_ALGORITHM, null);
+ }
+
+ @Override
+ public String getTransformation() {
+ return getVar(CRYPTO_TRANSFORMATION, null);
+ }
+
+ @Override
+ public String getSaltSize() {
+ return getVar(CRYPTO_SALTSIZE, null);
+ }
+
+ @Override
+ public String getIterationCount() {
+ return getVar(CRYPTO_ITERATION_COUNT, null);
+ }
+
+ @Override
+ public String getKeyLength() {
+ return getVar(CRYPTO_KEY_LENGTH, null);
+ }
+
+ @Override
+ public boolean isGatewayServerHeaderEnabled() {
+ return Boolean.parseBoolean(getVar(SERVER_HEADER_ENABLED, "true"));
+ }
+}
[18/25] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
index 455b0fa,0000000..38653f4
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/topology/impl/DefaultTopologyService.java
@@@ -1,689 -1,0 +1,818 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.knox.gateway.services.topology.impl;
+
+
+import org.apache.commons.digester3.Digester;
+import org.apache.commons.digester3.binder.DigesterLoader;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.monitor.FileAlterationListener;
+import org.apache.commons.io.monitor.FileAlterationListenerAdaptor;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.audit.api.Action;
+import org.apache.knox.gateway.audit.api.ActionOutcome;
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.audit.api.Auditor;
+import org.apache.knox.gateway.audit.api.ResourceType;
+import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.service.definition.ServiceDefinition;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.topology.TopologyService;
+import org.apache.knox.gateway.topology.Topology;
+import org.apache.knox.gateway.topology.TopologyEvent;
+import org.apache.knox.gateway.topology.TopologyListener;
+import org.apache.knox.gateway.topology.TopologyMonitor;
+import org.apache.knox.gateway.topology.TopologyProvider;
+import org.apache.knox.gateway.topology.builder.TopologyBuilder;
+import org.apache.knox.gateway.topology.validation.TopologyValidator;
+import org.apache.knox.gateway.topology.xml.AmbariFormatXmlTopologyRules;
+import org.apache.knox.gateway.topology.xml.KnoxFormatXmlTopologyRules;
+import org.apache.knox.gateway.util.ServiceDefinitionsLoader;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.topology.simple.SimpleDescriptorHandler;
+import org.eclipse.persistence.jaxb.JAXBContextProperties;
+import org.xml.sax.SAXException;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
++import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.apache.commons.digester3.binder.DigesterLoader.newLoader;
+
+
+public class DefaultTopologyService
+ extends FileAlterationListenerAdaptor
+ implements TopologyService, TopologyMonitor, TopologyProvider, FileFilter, FileAlterationListener {
+
+ private static Auditor auditor = AuditServiceFactory.getAuditService().getAuditor(
+ AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
+ AuditConstants.KNOX_COMPONENT_NAME);
+
+ private static final List<String> SUPPORTED_TOPOLOGY_FILE_EXTENSIONS = new ArrayList<String>();
+ static {
+ SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("xml");
+ SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.add("conf");
+ }
+
+ private static GatewayMessages log = MessagesFactory.get(GatewayMessages.class);
+ private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
+ private List<FileAlterationMonitor> monitors = new ArrayList<>();
+ private File topologiesDirectory;
++ private File sharedProvidersDirectory;
+ private File descriptorsDirectory;
+
++ private DescriptorsMonitor descriptorsMonitor;
++
+ private Set<TopologyListener> listeners;
+ private volatile Map<File, Topology> topologies;
+ private AliasService aliasService;
+
+
+ private Topology loadTopology(File file) throws IOException, SAXException, URISyntaxException, InterruptedException {
+ final long TIMEOUT = 250; //ms
+ final long DELAY = 50; //ms
+ log.loadingTopologyFile(file.getAbsolutePath());
+ Topology topology;
+ long start = System.currentTimeMillis();
+ while (true) {
+ try {
+ topology = loadTopologyAttempt(file);
+ break;
+ } catch (IOException e) {
+ if (System.currentTimeMillis() - start < TIMEOUT) {
+ log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
+ Thread.sleep(DELAY);
+ } else {
+ throw e;
+ }
+ } catch (SAXException e) {
+ if (System.currentTimeMillis() - start < TIMEOUT) {
+ log.failedToLoadTopologyRetrying(file.getAbsolutePath(), Long.toString(DELAY), e);
+ Thread.sleep(DELAY);
+ } else {
+ throw e;
+ }
+ }
+ }
+ return topology;
+ }
+
+ private Topology loadTopologyAttempt(File file) throws IOException, SAXException, URISyntaxException {
+ Topology topology;
+ Digester digester = digesterLoader.newDigester();
+ TopologyBuilder topologyBuilder = digester.parse(FileUtils.openInputStream(file));
+ if (null == topologyBuilder) {
+ return null;
+ }
+ topology = topologyBuilder.build();
+ topology.setUri(file.toURI());
+ topology.setName(FilenameUtils.removeExtension(file.getName()));
+ topology.setTimestamp(file.lastModified());
+ return topology;
+ }
+
+ private void redeployTopology(Topology topology) {
+ File topologyFile = new File(topology.getUri());
+ try {
+ TopologyValidator tv = new TopologyValidator(topology);
+
+ if(tv.validateTopology()) {
+ throw new SAXException(tv.getErrorString());
+ }
+
+ long start = System.currentTimeMillis();
+ long limit = 1000L; // One second.
+ long elapsed = 1;
+ while (elapsed <= limit) {
+ try {
+ long origTimestamp = topologyFile.lastModified();
+ long setTimestamp = Math.max(System.currentTimeMillis(), topologyFile.lastModified() + elapsed);
+ if(topologyFile.setLastModified(setTimestamp)) {
+ long newTimstamp = topologyFile.lastModified();
+ if(newTimstamp > origTimestamp) {
+ break;
+ } else {
+ Thread.sleep(10);
+ elapsed = System.currentTimeMillis() - start;
+ continue;
+ }
+ } else {
+ auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
+ ActionOutcome.FAILURE);
+ log.failedToRedeployTopology(topology.getName());
+ break;
+ }
+ } catch (InterruptedException e) {
+ auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
+ ActionOutcome.FAILURE);
+ log.failedToRedeployTopology(topology.getName(), e);
+ e.printStackTrace();
+ }
+ }
+ } catch (SAXException e) {
+ auditor.audit(Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
+ log.failedToRedeployTopology(topology.getName(), e);
+ }
+ }
+
+ private List<TopologyEvent> createChangeEvents(
+ Map<File, Topology> oldTopologies,
+ Map<File, Topology> newTopologies) {
+ ArrayList<TopologyEvent> events = new ArrayList<TopologyEvent>();
+ // Go through the old topologies and find anything that was deleted.
+ for (File file : oldTopologies.keySet()) {
+ if (!newTopologies.containsKey(file)) {
+ events.add(new TopologyEvent(TopologyEvent.Type.DELETED, oldTopologies.get(file)));
+ }
+ }
+ // Go through the new topologies and figure out what was updated vs added.
+ for (File file : newTopologies.keySet()) {
+ if (oldTopologies.containsKey(file)) {
+ Topology oldTopology = oldTopologies.get(file);
+ Topology newTopology = newTopologies.get(file);
+ if (newTopology.getTimestamp() > oldTopology.getTimestamp()) {
+ events.add(new TopologyEvent(TopologyEvent.Type.UPDATED, newTopologies.get(file)));
+ }
+ } else {
+ events.add(new TopologyEvent(TopologyEvent.Type.CREATED, newTopologies.get(file)));
+ }
+ }
+ return events;
+ }
+
+ private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
- String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
- File topoDir = new File(normalizedTopologyDir);
++ File topoDir = new File(config.getGatewayTopologyDir());
+ topoDir = topoDir.getAbsoluteFile();
+ return topoDir;
+ }
+
+ private File calculateAbsoluteConfigDir(GatewayConfig config) {
+ File configDir = null;
+
- String path = FilenameUtils.normalize(config.getGatewayConfDir());
- if (path != null) {
- configDir = new File(config.getGatewayConfDir());
- } else {
- configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
- }
- configDir = configDir.getAbsoluteFile();
++ String path = config.getGatewayConfDir();
++ configDir = (path != null) ? new File(path) : (new File(config.getGatewayTopologyDir())).getParentFile();
+
- return configDir;
++ return configDir.getAbsoluteFile();
+ }
+
+ private void initListener(FileAlterationMonitor monitor,
+ File directory,
+ FileFilter filter,
+ FileAlterationListener listener) {
+ monitors.add(monitor);
+ FileAlterationObserver observer = new FileAlterationObserver(directory, filter);
+ observer.addListener(listener);
+ monitor.addObserver(observer);
+ }
+
+ private void initListener(File directory, FileFilter filter, FileAlterationListener listener) throws IOException, SAXException {
+ // Increasing the monitoring interval to 5 seconds as profiling has shown
+ // this is rather expensive in terms of generated garbage objects.
+ initListener(new FileAlterationMonitor(5000L), directory, filter, listener);
+ }
+
+ private Map<File, Topology> loadTopologies(File directory) {
+ Map<File, Topology> map = new HashMap<>();
+ if (directory.isDirectory() && directory.canRead()) {
- for (File file : directory.listFiles(this)) {
- try {
- Topology loadTopology = loadTopology(file);
- if (null != loadTopology) {
- map.put(file, loadTopology);
- } else {
++ File[] existingTopologies = directory.listFiles(this);
++ if (existingTopologies != null) {
++ for (File file : existingTopologies) {
++ try {
++ Topology loadTopology = loadTopology(file);
++ if (null != loadTopology) {
++ map.put(file, loadTopology);
++ } else {
++ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
++ ActionOutcome.FAILURE);
++ log.failedToLoadTopology(file.getAbsolutePath());
++ }
++ } catch (IOException e) {
++ // Maybe it makes sense to throw exception
+ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath());
++ ActionOutcome.FAILURE);
++ log.failedToLoadTopology(file.getAbsolutePath(), e);
++ } catch (SAXException e) {
++ // Maybe it makes sense to throw exception
++ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
++ ActionOutcome.FAILURE);
++ log.failedToLoadTopology(file.getAbsolutePath(), e);
++ } catch (Exception e) {
++ // Maybe it makes sense to throw exception
++ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
++ ActionOutcome.FAILURE);
++ log.failedToLoadTopology(file.getAbsolutePath(), e);
+ }
- } catch (IOException e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
- } catch (SAXException e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
- } catch (Exception e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
+ }
+ }
+ }
+ return map;
+ }
+
+ public void setAliasService(AliasService as) {
+ this.aliasService = as;
+ }
+
+ public void deployTopology(Topology t){
+
+ try {
+ File temp = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml.temp");
+ Package topologyPkg = Topology.class.getPackage();
+ String pkgName = topologyPkg.getName();
+ String bindingFile = pkgName.replace(".", "/") + "/topology_binding-xml.xml";
+
+ Map<String, Object> properties = new HashMap<>(1);
+ properties.put(JAXBContextProperties.OXM_METADATA_SOURCE, bindingFile);
+ JAXBContext jc = JAXBContext.newInstance(pkgName, Topology.class.getClassLoader(), properties);
+ Marshaller mr = jc.createMarshaller();
+
+ mr.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+ mr.marshal(t, temp);
+
+ File topology = new File(topologiesDirectory.getAbsolutePath() + "/" + t.getName() + ".xml");
+ if(!temp.renameTo(topology)) {
+ FileUtils.forceDelete(temp);
+ throw new IOException("Could not rename temp file");
+ }
+
+ // This code will check if the topology is valid, and retrieve the errors if it is not.
+ TopologyValidator validator = new TopologyValidator( topology.getAbsolutePath() );
+ if( !validator.validateTopology() ){
+ throw new SAXException( validator.getErrorString() );
+ }
+
+
+ } catch (JAXBException e) {
+ auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
+ log.failedToDeployTopology(t.getName(), e);
+ } catch (IOException io) {
+ auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
+ log.failedToDeployTopology(t.getName(), io);
+ } catch (SAXException sx){
+ auditor.audit(Action.DEPLOY, t.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
+ log.failedToDeployTopology(t.getName(), sx);
+ }
+ reloadTopologies();
+ }
+
+ public void redeployTopologies(String topologyName) {
+
+ for (Topology topology : getTopologies()) {
+ if (topologyName == null || topologyName.equals(topology.getName())) {
+ redeployTopology(topology);
+ }
+ }
+
+ }
+
+ public void reloadTopologies() {
+ try {
+ synchronized (this) {
+ Map<File, Topology> oldTopologies = topologies;
+ Map<File, Topology> newTopologies = loadTopologies(topologiesDirectory);
+ List<TopologyEvent> events = createChangeEvents(oldTopologies, newTopologies);
+ topologies = newTopologies;
+ notifyChangeListeners(events);
+ }
+ } catch (Exception e) {
+ // Maybe it makes sense to throw exception
+ log.failedToReloadTopologies(e);
+ }
+ }
+
+ public void deleteTopology(Topology t) {
+ File topoDir = topologiesDirectory;
+
+ if(topoDir.isDirectory() && topoDir.canRead()) {
- File[] results = topoDir.listFiles();
- for (File f : results) {
++ for (File f : listFiles(topoDir)) {
+ String fName = FilenameUtils.getBaseName(f.getName());
+ if(fName.equals(t.getName())) {
+ f.delete();
+ }
+ }
+ }
+ reloadTopologies();
+ }
+
+ private void notifyChangeListeners(List<TopologyEvent> events) {
+ for (TopologyListener listener : listeners) {
+ try {
+ listener.handleTopologyEvent(events);
+ } catch (RuntimeException e) {
+ auditor.audit(Action.LOAD, "Topology_Event", ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
+ log.failedToHandleTopologyEvents(e);
+ }
+ }
+ }
+
+ public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
+ File tFile = null;
+ Map<String, List<String>> urls = new HashMap<>();
- if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
- for(File f : topologiesDirectory.listFiles()){
- if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
++ if (topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
++ for (File f : listFiles(topologiesDirectory)) {
++ if (FilenameUtils.removeExtension(f.getName()).equals(t.getName())) {
+ tFile = f;
+ }
+ }
+ }
+ Set<ServiceDefinition> defs;
+ if(tFile != null) {
+ defs = ServiceDefinitionsLoader.getServiceDefinitions(new File(config.getGatewayServicesDir()));
+
+ for(ServiceDefinition def : defs) {
+ urls.put(def.getRole(), def.getTestURLs());
+ }
+ }
+ return urls;
+ }
+
+ public Collection<Topology> getTopologies() {
+ Map<File, Topology> map = topologies;
+ return Collections.unmodifiableCollection(map.values());
+ }
+
+ @Override
++ public boolean deployProviderConfiguration(String name, String content) {
++ return writeConfig(sharedProvidersDirectory, name, content);
++ }
++
++ @Override
++ public Collection<File> getProviderConfigurations() {
++ List<File> providerConfigs = new ArrayList<>();
++ for (File providerConfig : listFiles(sharedProvidersDirectory)) {
++ if (SharedProviderConfigMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(providerConfig.getName()))) {
++ providerConfigs.add(providerConfig);
++ }
++ }
++ return providerConfigs;
++ }
++
++ @Override
++ public boolean deleteProviderConfiguration(String name) {
++ boolean result = false;
++
++ File providerConfig = getExistingFile(sharedProvidersDirectory, name);
++ if (providerConfig != null) {
++ List<String> references = descriptorsMonitor.getReferencingDescriptors(providerConfig.getAbsolutePath());
++ if (references.isEmpty()) {
++ result = providerConfig.delete();
++ } else {
++ log.preventedDeletionOfSharedProviderConfiguration(providerConfig.getAbsolutePath());
++ }
++ } else {
++ result = true; // If it already does NOT exist, then the delete effectively succeeded
++ }
++
++ return result;
++ }
++
++ @Override
++ public boolean deployDescriptor(String name, String content) {
++ return writeConfig(descriptorsDirectory, name, content);
++ }
++
++ @Override
++ public Collection<File> getDescriptors() {
++ List<File> descriptors = new ArrayList<>();
++ for (File descriptor : listFiles(descriptorsDirectory)) {
++ if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
++ descriptors.add(descriptor);
++ }
++ }
++ return descriptors;
++ }
++
++ @Override
++ public boolean deleteDescriptor(String name) {
++ File descriptor = getExistingFile(descriptorsDirectory, name);
++ return (descriptor == null) || descriptor.delete();
++ }
++
++ @Override
+ public void addTopologyChangeListener(TopologyListener listener) {
+ listeners.add(listener);
+ }
+
+ @Override
+ public void startMonitor() throws Exception {
+ for (FileAlterationMonitor monitor : monitors) {
+ monitor.start();
+ }
+ }
+
+ @Override
+ public void stopMonitor() throws Exception {
+ for (FileAlterationMonitor monitor : monitors) {
+ monitor.stop();
+ }
+ }
+
+ @Override
+ public boolean accept(File file) {
+ boolean accept = false;
+ if (!file.isDirectory() && file.canRead()) {
+ String extension = FilenameUtils.getExtension(file.getName());
+ if (SUPPORTED_TOPOLOGY_FILE_EXTENSIONS.contains(extension)) {
+ accept = true;
+ }
+ }
+ return accept;
+ }
+
+ @Override
+ public void onFileCreate(File file) {
+ onFileChange(file);
+ }
+
+ @Override
+ public void onFileDelete(java.io.File file) {
+ // For full topology descriptors, we need to make sure to delete any corresponding simple descriptors to prevent
+ // unintended subsequent generation of the topology descriptor
+ for (String ext : DescriptorsMonitor.SUPPORTED_EXTENSIONS) {
+ File simpleDesc =
+ new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+ if (simpleDesc.exists()) {
++ log.deletingDescriptorForTopologyDeletion(simpleDesc.getName(), file.getName());
+ simpleDesc.delete();
+ }
+ }
+
+ onFileChange(file);
+ }
+
+ @Override
+ public void onFileChange(File file) {
+ reloadTopologies();
+ }
+
+ @Override
+ public void stop() {
+
+ }
+
+ @Override
+ public void start() {
+
+ }
+
+ @Override
+ public void init(GatewayConfig config, Map<String, String> options) throws ServiceLifecycleException {
+
+ try {
+ listeners = new HashSet<>();
+ topologies = new HashMap<>();
+
+ topologiesDirectory = calculateAbsoluteTopologiesDir(config);
+
+ File configDirectory = calculateAbsoluteConfigDir(config);
+ descriptorsDirectory = new File(configDirectory, "descriptors");
- File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
++ sharedProvidersDirectory = new File(configDirectory, "shared-providers");
+
+ // Add support for conf/topologies
+ initListener(topologiesDirectory, this, this);
+
+ // Add support for conf/descriptors
- DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
++ descriptorsMonitor = new DescriptorsMonitor(topologiesDirectory, aliasService);
+ initListener(descriptorsDirectory,
- dm,
- dm);
++ descriptorsMonitor,
++ descriptorsMonitor);
++ log.monitoringDescriptorChangesInDirectory(descriptorsDirectory.getAbsolutePath());
+
+ // Add support for conf/shared-providers
- SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
++ SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(descriptorsMonitor, descriptorsDirectory);
+ initListener(sharedProvidersDirectory, spm, spm);
++ log.monitoringProviderConfigChangesInDirectory(sharedProvidersDirectory.getAbsolutePath());
+
+ // For all the descriptors currently in the descriptors dir at start-up time, trigger topology generation.
+ // This happens prior to the start-up loading of the topologies.
+ String[] descriptorFilenames = descriptorsDirectory.list();
+ if (descriptorFilenames != null) {
+ for (String descriptorFilename : descriptorFilenames) {
+ if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
- dm.onFileChange(new File(descriptorsDirectory, descriptorFilename));
++ descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
+ }
+ }
+ }
+
+ } catch (IOException | SAXException io) {
+ throw new ServiceLifecycleException(io.getMessage());
+ }
+ }
+
+
+ /**
++ * Utility method for listing the files in the specified directory.
++ * This method is "nicer" than the File#listFiles() because it will not return null.
++ *
++ * @param directory The directory whose files should be returned.
++ *
++ * @return A List of the Files on the directory.
++ */
++ private static List<File> listFiles(File directory) {
++ List<File> result = null;
++ File[] files = directory.listFiles();
++ if (files != null) {
++ result = Arrays.asList(files);
++ } else {
++ result = Collections.emptyList();
++ }
++ return result;
++ }
++
++ /**
++ * Search for a file in the specified directory whose base name (filename without extension) matches the
++ * specified basename.
++ *
++ * @param directory The directory in which to search.
++ * @param basename The basename of interest.
++ *
++ * @return The matching File
++ */
++ private static File getExistingFile(File directory, String basename) {
++ File match = null;
++ for (File file : listFiles(directory)) {
++ if (FilenameUtils.getBaseName(file.getName()).equals(basename)) {
++ match = file;
++ break;
++ }
++ }
++ return match;
++ }
++
++ /**
++ * Write the specified content to a file.
++ *
++ * @param dest The destination directory.
++ * @param name The name of the file.
++ * @param content The contents of the file.
++ *
++ * @return true, if the write succeeds; otherwise, false.
++ */
++ private static boolean writeConfig(File dest, String name, String content) {
++ boolean result = false;
++
++ File destFile = new File(dest, name);
++ try {
++ FileUtils.writeStringToFile(destFile, content);
++ log.wroteConfigurationFile(destFile.getAbsolutePath());
++ result = true;
++ } catch (IOException e) {
++ log.failedToWriteConfigurationFile(destFile.getAbsolutePath(), e);
++ }
++
++ return result;
++ }
++
++
++ /**
+ * Change handler for simple descriptors
+ */
+ public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
+ implements FileFilter {
+
+ static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<String>();
+ static {
+ SUPPORTED_EXTENSIONS.add("json");
+ SUPPORTED_EXTENSIONS.add("yml");
+ SUPPORTED_EXTENSIONS.add("yaml");
+ }
+
+ private File topologiesDir;
+
+ private AliasService aliasService;
+
+ private Map<String, List<String>> providerConfigReferences = new HashMap<>();
+
+
+ static boolean isDescriptorFile(String filename) {
+ return SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(filename));
+ }
+
/**
 * @param topologiesDir directory where generated topology files are written
 * @param aliasService service passed through to the simple-descriptor handler
 */
public DescriptorsMonitor(File topologiesDir, AliasService aliasService) {
  this.topologiesDir = topologiesDir;
  this.aliasService = aliasService;
}
+
+ List<String> getReferencingDescriptors(String providerConfigPath) {
- List<String> result = providerConfigReferences.get(providerConfigPath);
++ List<String> result = providerConfigReferences.get(FilenameUtils.normalize(providerConfigPath));
+ if (result == null) {
+ result = Collections.emptyList();
+ }
+ return result;
+ }
+
+ @Override
+ public void onFileCreate(File file) {
+ onFileChange(file);
+ }
+
+ @Override
+ public void onFileDelete(File file) {
+ // For simple descriptors, we need to make sure to delete any corresponding full topology descriptors to trigger undeployment
+ for (String ext : DefaultTopologyService.SUPPORTED_TOPOLOGY_FILE_EXTENSIONS) {
+ File topologyFile =
+ new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
+ if (topologyFile.exists()) {
++ log.deletingTopologyForDescriptorDeletion(topologyFile.getName(), file.getName());
+ topologyFile.delete();
+ }
+ }
+
+ String normalizedFilePath = FilenameUtils.normalize(file.getAbsolutePath());
+ String reference = null;
+ for (Map.Entry<String, List<String>> entry : providerConfigReferences.entrySet()) {
+ if (entry.getValue().contains(normalizedFilePath)) {
+ reference = entry.getKey();
+ break;
+ }
+ }
++
+ if (reference != null) {
+ providerConfigReferences.get(reference).remove(normalizedFilePath);
++ log.removedProviderConfigurationReference(normalizedFilePath, reference);
+ }
+ }
+
+ @Override
+ public void onFileChange(File file) {
+ try {
+ // When a simple descriptor has been created or modified, generate the new topology descriptor
+ Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
++ log.generatedTopologyForDescriptorChange(result.get("topology").getName(), file.getName());
+
+ // Add the provider config reference relationship for handling updates to the provider config
+ String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
+ if (!providerConfigReferences.containsKey(providerConfig)) {
+ providerConfigReferences.put(providerConfig, new ArrayList<String>());
+ }
+ List<String> refs = providerConfigReferences.get(providerConfig);
+ String descriptorName = FilenameUtils.normalize(file.getAbsolutePath());
+ if (!refs.contains(descriptorName)) {
+ // Need to check if descriptor had previously referenced another provider config, so it can be removed
+ for (List<String> descs : providerConfigReferences.values()) {
+ if (descs.contains(descriptorName)) {
+ descs.remove(descriptorName);
+ }
+ }
+
+ // Add the current reference relationship
+ refs.add(descriptorName);
++ log.addedProviderConfigurationReference(descriptorName, providerConfig);
+ }
+ } catch (Exception e) {
+ log.simpleDescriptorHandlingError(file.getName(), e);
+ }
+ }
+
+ @Override
+ public boolean accept(File file) {
+ boolean accept = false;
+ if (!file.isDirectory() && file.canRead()) {
+ String extension = FilenameUtils.getExtension(file.getName());
+ if (SUPPORTED_EXTENSIONS.contains(extension)) {
+ accept = true;
+ }
+ }
+ return accept;
+ }
+ }
+
+ /**
+ * Change handler for shared provider configurations
+ */
+ public static class SharedProviderConfigMonitor extends FileAlterationListenerAdaptor
+ implements FileFilter {
+
+ static final List<String> SUPPORTED_EXTENSIONS = new ArrayList<>();
+ static {
+ SUPPORTED_EXTENSIONS.add("xml");
+ }
+
+ private DescriptorsMonitor descriptorsMonitor;
+ private File descriptorsDir;
+
+
/**
 * @param descMonitor source of provider-config-to-descriptor reference information
 * @param descriptorsDir directory containing the simple descriptors
 */
SharedProviderConfigMonitor(DescriptorsMonitor descMonitor, File descriptorsDir) {
  this.descriptorsMonitor = descMonitor;
  this.descriptorsDir = descriptorsDir;
}
+
+ @Override
+ public void onFileCreate(File file) {
+ onFileChange(file);
+ }
+
+ @Override
+ public void onFileDelete(File file) {
+ onFileChange(file);
+ }
+
+ @Override
+ public void onFileChange(File file) {
+ // For shared provider configuration, we need to update any simple descriptors that reference it
+ for (File descriptor : getReferencingDescriptors(file)) {
+ descriptor.setLastModified(System.currentTimeMillis());
+ }
+ }
+
+ private List<File> getReferencingDescriptors(File sharedProviderConfig) {
+ List<File> references = new ArrayList<>();
+
- for (File descriptor : descriptorsDir.listFiles()) {
++ for (File descriptor : listFiles(descriptorsDir)) {
+ if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
+ for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
+ references.add(new File(reference));
+ }
+ }
+ }
+
+ return references;
+ }
+
+ @Override
+ public boolean accept(File file) {
+ boolean accept = false;
+ if (!file.isDirectory() && file.canRead()) {
+ String extension = FilenameUtils.getExtension(file.getName());
+ if (SUPPORTED_EXTENSIONS.contains(extension)) {
+ accept = true;
+ }
+ }
+ return accept;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index 1caa946,0000000..a1a2609
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@@ -1,94 -1,0 +1,105 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.builder;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.knox.gateway.topology.Application;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Service;
+import org.apache.knox.gateway.topology.Topology;
+
+public class BeanPropertyTopologyBuilder implements TopologyBuilder {
+
+ private String name;
++ private String defaultService;
+ private List<Provider> providers;
+ private List<Service> services;
+ private List<Application> applications;
+
+ public BeanPropertyTopologyBuilder() {
+ providers = new ArrayList<Provider>();
+ services = new ArrayList<Service>();
+ applications = new ArrayList<Application>();
+ }
+
+ public BeanPropertyTopologyBuilder name(String name) {
+ this.name = name;
+ return this;
+ }
+
+ public String name() {
+ return name;
+ }
+
++ public BeanPropertyTopologyBuilder defaultService(String defaultService) {
++ this.defaultService = defaultService;
++ return this;
++ }
++
++ public String defaultService() {
++ return defaultService;
++ }
++
+ public BeanPropertyTopologyBuilder addProvider(Provider provider) {
+ providers.add(provider);
+ return this;
+ }
+
+ public List<Provider> providers() {
+ return providers;
+ }
+
+ public BeanPropertyTopologyBuilder addService(Service service) {
+ services.add(service);
+ return this;
+ }
+
+ public List<Service> services() {
+ return services;
+ }
+
+ public BeanPropertyTopologyBuilder addApplication( Application application ) {
+ applications.add(application);
+ return this;
+ }
+
+ public List<Application> applications() {
+ return applications;
+ }
+
+ public Topology build() {
+ Topology topology = new Topology();
+ topology.setName(name);
++ topology.setDefaultServicePath(defaultService);
+
+ for (Provider provider : providers) {
+ topology.addProvider(provider);
+ }
+
+ for (Service service : services) {
+ topology.addService(service);
+ }
+
+ for (Application application : applications) {
+ topology.addApplication(application);
+ }
+
+ return topology;
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index 6b51ab8,0000000..81aedec
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@@ -1,93 -1,0 +1,95 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.xml;
+
+import org.apache.commons.digester3.Rule;
+import org.apache.commons.digester3.binder.AbstractRulesModule;
+import org.apache.knox.gateway.topology.Application;
+import org.apache.knox.gateway.topology.Param;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Service;
+import org.apache.knox.gateway.topology.Version;
+import org.apache.knox.gateway.topology.builder.BeanPropertyTopologyBuilder;
+import org.xml.sax.Attributes;
+
+public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
+
+ private static final String ROOT_TAG = "topology";
+ private static final String NAME_TAG = "name";
+ private static final String VERSION_TAG = "version";
++ private static final String DEFAULT_SERVICE_TAG = "path";
+ private static final String APPLICATION_TAG = "application";
+ private static final String SERVICE_TAG = "service";
+ private static final String ROLE_TAG = "role";
+ private static final String URL_TAG = "url";
+ private static final String PROVIDER_TAG = "gateway/provider";
+ private static final String ENABLED_TAG = "enabled";
+ private static final String PARAM_TAG = "param";
+ private static final String VALUE_TAG = "value";
+
+ private static final Rule paramRule = new ParamRule();
+
+ @Override
+ protected void configure() {
+ forPattern( ROOT_TAG ).createObject().ofType( BeanPropertyTopologyBuilder.class );
+ forPattern( ROOT_TAG + "/" + NAME_TAG ).callMethod("name").usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + VERSION_TAG ).callMethod("version").usingElementBodyAsArgument();
++ forPattern( ROOT_TAG + "/" + DEFAULT_SERVICE_TAG ).callMethod("defaultService").usingElementBodyAsArgument();
+
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG ).createObject().ofType( Application.class ).then().setNext( "addApplication" );
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + ROLE_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + VERSION_TAG ).createObject().ofType(Version.class).then().setBeanProperty().then().setNext("setVersion");
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + URL_TAG ).callMethod( "addUrl" ).usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG + "/" + VALUE_TAG ).setBeanProperty();
+
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG ).createObject().ofType( Service.class ).then().setNext( "addService" );
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + ROLE_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + VERSION_TAG ).createObject().ofType(Version.class).then().setBeanProperty().then().setNext("setVersion");
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + URL_TAG ).callMethod( "addUrl" ).usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + PARAM_TAG + "/" + VALUE_TAG ).setBeanProperty();
+
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG ).createObject().ofType( Provider.class ).then().setNext( "addProvider" );
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + ROLE_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + ENABLED_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + PROVIDER_TAG + "/" + PARAM_TAG + "/" + VALUE_TAG ).setBeanProperty();
+ }
+
+ private static class ParamRule extends Rule {
+
+ @Override
+ public void begin( String namespace, String name, Attributes attributes ) {
+ Param param = getDigester().peek();
+ String paramName = attributes.getValue( "name" );
+ if( paramName != null ) {
+ param.setName( paramName );
+ param.setValue( attributes.getValue( "value" ) );
+ }
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
index 178ff5e,0000000..ac22400
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
@@@ -1,171 -1,0 +1,220 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway;
+
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.filter.AbstractGatewayFilter;
++import org.apache.knox.gateway.topology.Topology;
+import org.apache.hadoop.test.category.FastTests;
+import org.apache.hadoop.test.category.UnitTests;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import javax.servlet.*;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.net.URISyntaxException;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+/**
+ *
+ */
+@Category( { UnitTests.class, FastTests.class } )
+public class GatewayFilterTest {
+
+ @Before
+ public void setup() {
+ AuditServiceFactory.getAuditService().createContext();
+ }
+
+ @After
+ public void reset() {
+ AuditServiceFactory.getAuditService().detachContext();
+ }
+
+ @Test
+ public void testNoFilters() throws ServletException, IOException {
+
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
+ ServletContext context = EasyMock.createNiceMock( ServletContext.class );
+ GatewayConfig gatewayConfig = EasyMock.createNiceMock( GatewayConfig.class );
+ EasyMock.expect( request.getPathInfo() ).andReturn( "source" ).anyTimes();
+ EasyMock.expect( request.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect( context.getAttribute(
+ GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE)).andReturn(gatewayConfig).anyTimes();
+ EasyMock.expect(gatewayConfig.getHeaderNameForRemoteAddress()).andReturn(
+ "Custom-Forwarded-For").anyTimes();
+ EasyMock.replay( request );
+ EasyMock.replay( context );
+ EasyMock.replay( gatewayConfig );
+
+ HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
+ EasyMock.replay( response );
+
+ FilterChain chain = EasyMock.createNiceMock( FilterChain.class );
+ EasyMock.replay( chain );
+
+ GatewayFilter gateway = new GatewayFilter();
+ gateway.init( config );
+ gateway.doFilter( request, response, chain );
+ gateway.destroy();
+ }
+
+ @Test
+ public void testNoopFilter() throws ServletException, IOException, URISyntaxException {
+
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
+ ServletContext context = EasyMock.createNiceMock( ServletContext.class );
+ GatewayConfig gatewayConfig = EasyMock.createNiceMock( GatewayConfig.class );
+ EasyMock.expect( request.getPathInfo() ).andReturn( "source" ).anyTimes();
+ EasyMock.expect( request.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect( context.getAttribute(
+ GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE)).andReturn(gatewayConfig).anyTimes();
+ EasyMock.expect(gatewayConfig.getHeaderNameForRemoteAddress()).andReturn(
+ "Custom-Forwarded-For").anyTimes();
+ EasyMock.replay( request );
+ EasyMock.replay( context );
+ EasyMock.replay( gatewayConfig );
+
+ HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
+ EasyMock.replay( response );
+
+ FilterChain chain = EasyMock.createNiceMock( FilterChain.class );
+ EasyMock.replay( chain );
+
+ Filter filter = EasyMock.createNiceMock( Filter.class );
+ EasyMock.replay( filter );
+
+ GatewayFilter gateway = new GatewayFilter();
+ gateway.addFilter( "path", "filter", filter, null, null );
+ gateway.init( config );
+ gateway.doFilter( request, response, chain );
+ gateway.destroy();
+
+ }
+
+ public static class TestRoleFilter extends AbstractGatewayFilter {
+
+ public Object role;
++ public String defaultServicePath;
++ public String url;
+
+ @Override
+ protected void doFilter( HttpServletRequest request, HttpServletResponse response, FilterChain chain ) throws IOException, ServletException {
+ this.role = request.getAttribute( AbstractGatewayFilter.TARGET_SERVICE_ROLE );
++ Topology topology = (Topology)request.getServletContext().getAttribute( "org.apache.knox.gateway.topology" );
++ if (topology != null) {
++ this.defaultServicePath = (String) topology.getDefaultServicePath();
++ url = new String(request.getRequestURL());
++ }
+ }
+
+ }
+
+ @Test
+ public void testTargetServiceRoleRequestAttribute() throws Exception {
+
+ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
+ EasyMock.replay( config );
+
+ HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
+ ServletContext context = EasyMock.createNiceMock( ServletContext.class );
+ GatewayConfig gatewayConfig = EasyMock.createNiceMock( GatewayConfig.class );
+ EasyMock.expect( request.getPathInfo() ).andReturn( "test-path/test-resource" ).anyTimes();
+ EasyMock.expect( request.getServletContext() ).andReturn( context ).anyTimes();
+ EasyMock.expect( context.getAttribute(
+ GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE)).andReturn(gatewayConfig).anyTimes();
+ EasyMock.expect(gatewayConfig.getHeaderNameForRemoteAddress()).andReturn(
+ "Custom-Forwarded-For").anyTimes();
+ request.setAttribute( AbstractGatewayFilter.TARGET_SERVICE_ROLE, "test-role" );
+ EasyMock.expectLastCall().anyTimes();
+ EasyMock.expect( request.getAttribute( AbstractGatewayFilter.TARGET_SERVICE_ROLE ) ).andReturn( "test-role" ).anyTimes();
+ EasyMock.replay( request );
+ EasyMock.replay( context );
+ EasyMock.replay( gatewayConfig );
+
+ HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
+ EasyMock.replay( response );
+
+ TestRoleFilter filter = new TestRoleFilter();
+
+ GatewayFilter gateway = new GatewayFilter();
+ gateway.addFilter( "test-path/**", "test-filter", filter, null, "test-role" );
+ gateway.init( config );
+ gateway.doFilter( request, response );
+ gateway.destroy();
+
+ assertThat( (String)filter.role, is( "test-role" ) );
+
+ }
+
++ @Test
++ public void testDefaultServicePathTopologyRequestAttribute() throws Exception {
++
++ FilterConfig config = EasyMock.createNiceMock( FilterConfig.class );
++ EasyMock.replay( config );
++
++ Topology topology = EasyMock.createNiceMock( Topology.class );
++ topology.setDefaultServicePath("test-role/");
++ HttpServletRequest request = EasyMock.createNiceMock( HttpServletRequest.class );
++ ServletContext context = EasyMock.createNiceMock( ServletContext.class );
++ GatewayConfig gatewayConfig = EasyMock.createNiceMock( GatewayConfig.class );
++ EasyMock.expect( topology.getDefaultServicePath() ).andReturn( "test-role" ).anyTimes();
++ EasyMock.expect( request.getPathInfo() ).andReturn( "/test-path/test-resource" ).anyTimes();
++ EasyMock.expect( request.getServletContext() ).andReturn( context ).anyTimes();
++ EasyMock.expect( context.getAttribute(
++ GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE)).andReturn(gatewayConfig).anyTimes();
++ EasyMock.expect(gatewayConfig.getHeaderNameForRemoteAddress()).andReturn(
++ "Custom-Forwarded-For").anyTimes();
++ EasyMock.expect( request.getRequestURL() ).andReturn( new StringBuffer("http://host:8443/gateway/sandbox/test-path/test-resource/") ).anyTimes();
++
++ EasyMock.expect( context.getAttribute( "org.apache.knox.gateway.topology" ) ).andReturn( topology ).anyTimes();
++ EasyMock.replay( request );
++ EasyMock.replay( context );
++ EasyMock.replay( topology );
++ EasyMock.replay( gatewayConfig );
++
++ HttpServletResponse response = EasyMock.createNiceMock( HttpServletResponse.class );
++ EasyMock.replay( response );
++
++ TestRoleFilter filter = new TestRoleFilter();
++
++ GatewayFilter gateway = new GatewayFilter();
++ gateway.addFilter( "test-role/**/**", "test-filter", filter, null, "test-role" );
++ gateway.init( config );
++ gateway.doFilter( request, response );
++ gateway.destroy();
++
++ assertThat( (String)filter.defaultServicePath, is( "test-role" ) );
++ assertThat( (String)filter.url, is("http://host:8443/gateway/sandbox/test-role/test-path/test-resource"));
++
++ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
index d28ad7f,0000000..95d6f9d
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
@@@ -1,266 -1,0 +1,610 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.topology;
+
+import org.apache.commons.io.FileUtils;
++import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
++import org.apache.commons.io.monitor.FileAlterationListener;
+import org.apache.commons.io.monitor.FileAlterationMonitor;
+import org.apache.commons.io.monitor.FileAlterationObserver;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
++import org.apache.knox.gateway.config.GatewayConfig;
++import org.apache.knox.gateway.services.security.AliasService;
++import org.apache.knox.gateway.services.topology.impl.DefaultTopologyService;
++import org.apache.knox.gateway.topology.*;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.gateway.topology.Param;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Topology;
+import org.apache.knox.gateway.topology.TopologyEvent;
+import org.apache.knox.gateway.topology.TopologyListener;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
- import java.util.*;
++import java.util.ArrayList;
++import java.util.Arrays;
++import java.util.Collection;
++import java.util.HashMap;
++import java.util.HashSet;
++import java.util.Iterator;
++import java.util.List;
++import java.util.Map;
++import java.util.Set;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.core.IsNull.notNullValue;
++import static org.junit.Assert.assertEquals;
++import static org.junit.Assert.assertFalse;
++import static org.junit.Assert.assertNotEquals;
++import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+public class DefaultTopologyServiceTest {
+
+ @Before
+ public void setUp() throws Exception {
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ }
+
+ private File createDir() throws IOException {
+ return TestUtils.createTempDir(this.getClass().getSimpleName() + "-");
+ }
+
+ private File createFile(File parent, String name, String resource, long timestamp) throws IOException {
+ File file = new File(parent, name);
+ if (!file.exists()) {
+ FileUtils.touch(file);
+ }
+ InputStream input = ClassLoader.getSystemResourceAsStream(resource);
+ OutputStream output = FileUtils.openOutputStream(file);
+ IOUtils.copy(input, output);
+ //KNOX-685: output.flush();
+ input.close();
+ output.close();
+ file.setLastModified(timestamp);
+ assertTrue("Failed to create test file " + file.getAbsolutePath(), file.exists());
+ assertTrue("Failed to populate test file " + file.getAbsolutePath(), file.length() > 0);
+
+ return file;
+ }
+
+ @Test
+ public void testGetTopologies() throws Exception {
+
+ File dir = createDir();
+ File topologyDir = new File(dir, "topologies");
+
- File descriptorsDir = new File(dir, "descriptors");
- descriptorsDir.mkdirs();
-
- File sharedProvidersDir = new File(dir, "shared-providers");
- sharedProvidersDir.mkdirs();
-
+ long time = topologyDir.lastModified();
+ try {
+ createFile(topologyDir, "one.xml", "org/apache/knox/gateway/topology/file/topology-one.xml", time);
+
+ TestTopologyListener topoListener = new TestTopologyListener();
+ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
+
+ TopologyService provider = new DefaultTopologyService();
+ Map<String, String> c = new HashMap<>();
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
- EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
++ EasyMock.expect(config.getGatewayConfDir()).andReturn(topologyDir.getParentFile().getAbsolutePath()).anyTimes();
+ EasyMock.replay(config);
+
+ provider.init(config, c);
+
+ provider.addTopologyChangeListener(topoListener);
+
+ provider.reloadTopologies();
+
+ Collection<Topology> topologies = provider.getTopologies();
+ assertThat(topologies, notNullValue());
+ assertThat(topologies.size(), is(1));
+ Topology topology = topologies.iterator().next();
+ assertThat(topology.getName(), is("one"));
+ assertThat(topology.getTimestamp(), is(time));
+ assertThat(topoListener.events.size(), is(1));
+ topoListener.events.clear();
+
+ // Add a file to the directory.
+ File two = createFile(topologyDir, "two.xml",
+ "org/apache/knox/gateway/topology/file/topology-two.xml", 1L);
+ provider.reloadTopologies();
+ topologies = provider.getTopologies();
+ assertThat(topologies.size(), is(2));
+ Set<String> names = new HashSet<>(Arrays.asList("one", "two"));
+ Iterator<Topology> iterator = topologies.iterator();
+ topology = iterator.next();
+ assertThat(names, hasItem(topology.getName()));
+ names.remove(topology.getName());
+ topology = iterator.next();
+ assertThat(names, hasItem(topology.getName()));
+ names.remove(topology.getName());
+ assertThat(names.size(), is(0));
+ assertThat(topoListener.events.size(), is(1));
+ List<TopologyEvent> events = topoListener.events.get(0);
+ assertThat(events.size(), is(1));
+ TopologyEvent event = events.get(0);
+ assertThat(event.getType(), is(TopologyEvent.Type.CREATED));
+ assertThat(event.getTopology(), notNullValue());
+
+ // Update a file in the directory.
+ two = createFile(topologyDir, "two.xml",
+ "org/apache/knox/gateway/topology/file/topology-three.xml", 2L);
+ provider.reloadTopologies();
+ topologies = provider.getTopologies();
+ assertThat(topologies.size(), is(2));
+ names = new HashSet<>(Arrays.asList("one", "two"));
+ iterator = topologies.iterator();
+ topology = iterator.next();
+ assertThat(names, hasItem(topology.getName()));
+ names.remove(topology.getName());
+ topology = iterator.next();
+ assertThat(names, hasItem(topology.getName()));
+ names.remove(topology.getName());
+ assertThat(names.size(), is(0));
+
+ // Remove a file from the directory.
+ two.delete();
+ provider.reloadTopologies();
+ topologies = provider.getTopologies();
+ assertThat(topologies.size(), is(1));
+ topology = topologies.iterator().next();
+ assertThat(topology.getName(), is("one"));
+ assertThat(topology.getTimestamp(), is(time));
+
++ } finally {
++ FileUtils.deleteQuietly(dir);
++ }
++ }
++
++ /**
++ * KNOX-1014
++ *
++ * Test the lifecycle relationship between simple descriptors and topology files.
++ *
++ * N.B. This test depends on the DummyServiceDiscovery extension being configured:
++ * org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
++ */
++ @Test
++ public void testSimpleDescriptorsTopologyGeneration() throws Exception {
++
++ File dir = createDir();
++ File topologyDir = new File(dir, "topologies");
++ topologyDir.mkdirs();
++
++ File descriptorsDir = new File(dir, "descriptors");
++ descriptorsDir.mkdirs();
++
++ File sharedProvidersDir = new File(dir, "shared-providers");
++ sharedProvidersDir.mkdirs();
++
++ try {
++ TestTopologyListener topoListener = new TestTopologyListener();
++ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
++
++ TopologyService provider = new DefaultTopologyService();
++ Map<String, String> c = new HashMap<>();
++
++ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
++ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
++ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
++ EasyMock.replay(config);
++
++ provider.init(config, c);
++ provider.addTopologyChangeListener(topoListener);
++ provider.reloadTopologies();
++
++
+ // Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
- // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
- // org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
+ EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
+ EasyMock.replay(aliasService);
+ DefaultTopologyService.DescriptorsMonitor dm =
- new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
++ new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
++
++ // Listener to simulate the topologies directory monitor, to notice when a topology has been deleted
++ provider.addTopologyChangeListener(new TestTopologyDeleteListener((DefaultTopologyService)provider));
+
+ // Write out the referenced provider config first
+ File provCfgFile = createFile(sharedProvidersDir,
+ "ambari-cluster-policy.xml",
- "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
- 1L);
++ "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
++ System.currentTimeMillis());
+ try {
+ // Create the simple descriptor in the descriptors dir
- File simpleDesc =
- createFile(descriptorsDir,
- "four.json",
- "org/apache/knox/gateway/topology/file/simple-topology-four.json",
- 1L);
++ File simpleDesc = createFile(descriptorsDir,
++ "four.json",
++ "org/apache/knox/gateway/topology/file/simple-topology-four.json",
++ System.currentTimeMillis());
+
+ // Trigger the topology generation by noticing the simple descriptor
+ dm.onFileChange(simpleDesc);
+
+ // Load the generated topology
+ provider.reloadTopologies();
++ Collection<Topology> topologies = provider.getTopologies();
++ assertThat(topologies.size(), is(1));
++ Iterator<Topology> iterator = topologies.iterator();
++ Topology topology = iterator.next();
++ assertThat("four", is(topology.getName()));
++ int serviceCount = topology.getServices().size();
++ assertEquals("Expected the same number of services as are declared in the simple descriptor.", 10, serviceCount);
++
++ // Overwrite the simple descriptor with a different set of services, and check that the changes are
++ // propagated to the associated topology
++ simpleDesc = createFile(descriptorsDir,
++ "four.json",
++ "org/apache/knox/gateway/topology/file/simple-descriptor-five.json",
++ System.currentTimeMillis());
++ dm.onFileChange(simpleDesc);
++ provider.reloadTopologies();
++ topologies = provider.getTopologies();
++ topology = topologies.iterator().next();
++ assertNotEquals(serviceCount, topology.getServices().size());
++ assertEquals(6, topology.getServices().size());
++
++ // Delete the simple descriptor, and make sure that the associated topology file is deleted
++ simpleDesc.delete();
++ dm.onFileDelete(simpleDesc);
++ provider.reloadTopologies();
+ topologies = provider.getTopologies();
- assertThat(topologies.size(), is(2));
- names = new HashSet<>(Arrays.asList("one", "four"));
- iterator = topologies.iterator();
- topology = iterator.next();
- assertThat(names, hasItem(topology.getName()));
- names.remove(topology.getName());
- topology = iterator.next();
- assertThat(names, hasItem(topology.getName()));
- names.remove(topology.getName());
- assertThat(names.size(), is(0));
++ assertTrue(topologies.isEmpty());
++
++ // Delete a topology file, and make sure that the associated simple descriptor is also deleted
++ simpleDesc = createFile(descriptorsDir,
++ "deleteme.json",
++ "org/apache/knox/gateway/topology/file/simple-descriptor-five.json",
++ System.currentTimeMillis());
++ dm.onFileChange(simpleDesc);
++ provider.reloadTopologies();
++ topologies = provider.getTopologies();
++ assertFalse(topologies.isEmpty());
++ topology = topologies.iterator().next();
++ assertEquals("deleteme", topology.getName());
++ File topologyFile = new File(topologyDir, topology.getName() + ".xml");
++ assertTrue(topologyFile.exists());
++ topologyFile.delete();
++ provider.reloadTopologies();
++ assertFalse("Simple descriptor should have been deleted because the associated topology was.",
++ simpleDesc.exists());
++
+ } finally {
+ provCfgFile.delete();
-
+ }
+ } finally {
+ FileUtils.deleteQuietly(dir);
+ }
+ }
+
++ /**
++ * KNOX-1014
++ *
++ * Test the lifecycle relationship between provider configuration files, simple descriptors, and topology files.
++ *
++ * N.B. This test depends on the DummyServiceDiscovery extension being configured:
++ * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
++ */
++ @Test
++ public void testTopologiesUpdateFromProviderConfigChange() throws Exception {
++ File dir = createDir();
++ File topologyDir = new File(dir, "topologies");
++ topologyDir.mkdirs();
++
++ File descriptorsDir = new File(dir, "descriptors");
++ descriptorsDir.mkdirs();
++
++ File sharedProvidersDir = new File(dir, "shared-providers");
++ sharedProvidersDir.mkdirs();
++
++ try {
++ TestTopologyListener topoListener = new TestTopologyListener();
++ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
++
++ TopologyService ts = new DefaultTopologyService();
++ Map<String, String> c = new HashMap<>();
++
++ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
++ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
++ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
++ EasyMock.replay(config);
++
++ ts.init(config, c);
++ ts.addTopologyChangeListener(topoListener);
++ ts.reloadTopologies();
++
++ java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
++ dmField.setAccessible(true);
++ DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
++
++ // Write out the referenced provider configs first
++ createFile(sharedProvidersDir,
++ "provider-config-one.xml",
++ "org/apache/knox/gateway/topology/file/provider-config-one.xml",
++ System.currentTimeMillis());
++
++ // Create the simple descriptor, which depends on provider-config-one.xml
++ File simpleDesc = createFile(descriptorsDir,
++ "six.json",
++ "org/apache/knox/gateway/topology/file/simple-descriptor-six.json",
++ System.currentTimeMillis());
++
++ // "Notice" the simple descriptor change, and generate a topology based on it
++ dm.onFileChange(simpleDesc);
++
++ // Load the generated topology
++ ts.reloadTopologies();
++ Collection<Topology> topologies = ts.getTopologies();
++ assertThat(topologies.size(), is(1));
++ Iterator<Topology> iterator = topologies.iterator();
++ Topology topology = iterator.next();
++ assertFalse("The Shiro provider is disabled in provider-config-one.xml",
++ topology.getProvider("authentication", "ShiroProvider").isEnabled());
++
++ // Overwrite the referenced provider configuration with a different ShiroProvider config, and check that the
++ // changes are propagated to the associated topology
++ File providerConfig = createFile(sharedProvidersDir,
++ "provider-config-one.xml",
++ "org/apache/knox/gateway/topology/file/ambari-cluster-policy.xml",
++ System.currentTimeMillis());
++
++ // "Notice" the simple descriptor change as a result of the referenced config change
++ dm.onFileChange(simpleDesc);
++
++ // Load the generated topology
++ ts.reloadTopologies();
++ topologies = ts.getTopologies();
++ assertFalse(topologies.isEmpty());
++ topology = topologies.iterator().next();
++ assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
++ topology.getProvider("authentication", "ShiroProvider").isEnabled());
++
++ // Delete the provider configuration, and make sure that the associated topology file is unaffected.
++ // The topology file should not be affected because the simple descriptor handling will fail to resolve the
++ // referenced provider configuration.
++ providerConfig.delete(); // Delete the file
++ dm.onFileChange(simpleDesc); // The provider config deletion will trigger a descriptor change notification
++ ts.reloadTopologies();
++ topologies = ts.getTopologies();
++ assertFalse(topologies.isEmpty());
++ assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
++ topology.getProvider("authentication", "ShiroProvider").isEnabled());
++
++ } finally {
++ FileUtils.deleteQuietly(dir);
++ }
++ }
++
++ /**
++ * KNOX-1039
++ */
++ @Test
++ public void testConfigurationCRUDAPI() throws Exception {
++ File dir = createDir();
++ File topologyDir = new File(dir, "topologies");
++ topologyDir.mkdirs();
++
++ File descriptorsDir = new File(dir, "descriptors");
++ descriptorsDir.mkdirs();
++
++ File sharedProvidersDir = new File(dir, "shared-providers");
++ sharedProvidersDir.mkdirs();
++
++ try {
++ TestTopologyListener topoListener = new TestTopologyListener();
++ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
++
++ TopologyService ts = new DefaultTopologyService();
++ Map<String, String> c = new HashMap<>();
++
++ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
++ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
++ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
++ EasyMock.replay(config);
++
++ ts.init(config, c);
++ ts.addTopologyChangeListener(topoListener);
++ ts.reloadTopologies();
++
++ java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
++ dmField.setAccessible(true);
++ DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
++
++ final String simpleDescName = "six.json";
++ final String provConfOne = "provider-config-one.xml";
++ final String provConfTwo = "ambari-cluster-policy.xml";
++
++ // "Deploy" the referenced provider configs first
++ boolean isDeployed =
++ ts.deployProviderConfiguration(provConfOne,
++ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/provider-config-one.xml").toURI())));
++ assertTrue(isDeployed);
++ File provConfOneFile = new File(sharedProvidersDir, provConfOne);
++ assertTrue(provConfOneFile.exists());
++
++ isDeployed =
++ ts.deployProviderConfiguration(provConfTwo,
++ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml").toURI())));
++ assertTrue(isDeployed);
++ File provConfTwoFile = new File(sharedProvidersDir, provConfTwo);
++ assertTrue(provConfTwoFile.exists());
++
++ // Validate the provider configurations known by the topology service
++ Collection<File> providerConfigurations = ts.getProviderConfigurations();
++ assertNotNull(providerConfigurations);
++ assertEquals(2, providerConfigurations.size());
++ assertTrue(providerConfigurations.contains(provConfOneFile));
++ assertTrue(providerConfigurations.contains(provConfTwoFile));
++
++ // "Deploy" the simple descriptor, which depends on provConfOne
++ isDeployed =
++ ts.deployDescriptor(simpleDescName,
++ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json").toURI())));
++ assertTrue(isDeployed);
++ File simpleDesc = new File(descriptorsDir, simpleDescName);
++ assertTrue(simpleDesc.exists());
++
++ // Validate the simple descriptors known by the topology service
++ Collection<File> descriptors = ts.getDescriptors();
++ assertNotNull(descriptors);
++ assertEquals(1, descriptors.size());
++ assertTrue(descriptors.contains(simpleDesc));
++
++ // "Notice" the simple descriptor, so the provider configuration dependency relationship is recorded
++ dm.onFileChange(simpleDesc);
++
++ // Attempt to delete the referenced provConfOne
++ assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
++ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
++
++ // Overwrite the simple descriptor with content that changes the provider config reference to provConfTwo
++ isDeployed =
++ ts.deployDescriptor(simpleDescName,
++ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json").toURI())));
++ assertTrue(isDeployed);
++ assertTrue(simpleDesc.exists());
++ ts.getProviderConfigurations();
++
++ // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
++ dm.onFileChange(simpleDesc);
++
++ // Attempt to delete the referenced provConfOne
++ assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
++ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
++
++ // Re-validate the provider configurations known by the topology service
++ providerConfigurations = ts.getProviderConfigurations();
++ assertNotNull(providerConfigurations);
++ assertEquals(1, providerConfigurations.size());
++ assertFalse(providerConfigurations.contains(provConfOneFile));
++ assertTrue(providerConfigurations.contains(provConfTwoFile));
++
++ // Attempt to delete the referenced provConfTwo
++ assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
++ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
++
++ // Delete the referencing simple descriptor
++ assertTrue(ts.deleteDescriptor(FilenameUtils.getBaseName(simpleDescName)));
++ assertFalse(simpleDesc.exists());
++
++ // Re-validate the simple descriptors known by the topology service
++ descriptors = ts.getDescriptors();
++ assertNotNull(descriptors);
++ assertTrue(descriptors.isEmpty());
++
++ // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
++ dm.onFileDelete(simpleDesc);
++
++ // Attempt to delete the referenced provConfTwo
++ assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
++ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
++
++ // Re-validate the provider configurations known by the topology service
++ providerConfigurations = ts.getProviderConfigurations();
++ assertNotNull(providerConfigurations);
++ assertTrue(providerConfigurations.isEmpty());
++
++ } finally {
++ FileUtils.deleteQuietly(dir);
++ }
++ }
++
+ private void kickMonitor(FileAlterationMonitor monitor) {
+ for (FileAlterationObserver observer : monitor.getObservers()) {
+ observer.checkAndNotify();
+ }
+ }
+
++
+ @Test
+ public void testProviderParamsOrderIsPreserved() {
+
+ Provider provider = new Provider();
+ String names[] = {"ldapRealm=",
+ "ldapContextFactory",
+ "ldapRealm.contextFactory",
+ "ldapGroupRealm",
+ "ldapGroupRealm.contextFactory",
+ "ldapGroupRealm.contextFactory.systemAuthenticationMechanism"
+ };
+
+ Param param = null;
+ for (String name : names) {
+ param = new Param();
+ param.setName(name);
+ param.setValue(name);
+ provider.addParam(param);
+
+ }
+ Map<String, String> params = provider.getParams();
+ Set<String> keySet = params.keySet();
+ Iterator<String> iter = keySet.iterator();
+ int i = 0;
+ while (iter.hasNext()) {
+ assertTrue(iter.next().equals(names[i++]));
+ }
+
+ }
+
+ private class TestTopologyListener implements TopologyListener {
+
- public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
++ ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
+
+ @Override
+ public void handleTopologyEvent(List<TopologyEvent> events) {
+ this.events.add(events);
+ }
+
+ }
+
++
++ private class TestTopologyDeleteListener implements TopologyListener {
++
++ FileAlterationListener delegate;
++
++ TestTopologyDeleteListener(FileAlterationListener delegate) {
++ this.delegate = delegate;
++ }
++
++ @Override
++ public void handleTopologyEvent(List<TopologyEvent> events) {
++ for (TopologyEvent event : events) {
++ if (event.getType().equals(TopologyEvent.Type.DELETED)) {
++ delegate.onFileDelete(new File(event.getTopology().getUri()));
++ }
++ }
++ }
++
++ }
++
+}
[14/25] knox git commit: KNOX-1017 - Add support for enabling
"Strict-Transport-Security" header in Knox responses (Latha Appanna via
lmccay)
Posted by mo...@apache.org.
KNOX-1017 - Add support for enabling "Strict-Transport-Security" header in Knox responses (Latha Appanna via lmccay)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/b60322a6
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/b60322a6
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/b60322a6
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: b60322a6e2ba97b9f989d1efbe5183bfbc009c56
Parents: 710e784
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sun Oct 29 16:05:16 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sun Oct 29 16:05:16 2017 -0400
----------------------------------------------------------------------
.../webappsec/deploy/WebAppSecContributor.java | 11 ++
.../webappsec/filter/StrictTranportFilter.java | 137 ++++++++++++++++
.../webappsec/StrictTranportFilterTest.java | 164 +++++++++++++++++++
.../home/conf/topologies/manager.xml | 1 +
gateway-release/home/templates/sandbox-apps.xml | 1 +
5 files changed, 314 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/b60322a6/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
index 3904ff1..9a1d174 100644
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
@@ -42,6 +42,9 @@ public class WebAppSecContributor extends
private static final String XFRAME_OPTIONS_SUFFIX = "_XFRAMEOPTIONS";
private static final String XFRAME_OPTIONS_FILTER_CLASSNAME = "org.apache.hadoop.gateway.webappsec.filter.XFrameOptionsFilter";
private static final String XFRAME_OPTIONS_ENABLED = "xframe.options.enabled";
+ private static final String STRICT_TRANSPORT_SUFFIX = "_STRICTTRANSPORT";
+ private static final String STRICT_TRANSPORT_FILTER_CLASSNAME = "org.apache.hadoop.gateway.webappsec.filter.StrictTranportFilter";
+ private static final String STRICT_TRANSPORT_ENABLED = "strict.transport.enabled";
@Override
public String getRole() {
@@ -92,6 +95,14 @@ public class WebAppSecContributor extends
provisionConfig(resource, providerParams, params, "xframe.");
resource.addFilter().name( getName() + XFRAME_OPTIONS_SUFFIX ).role( getRole() ).impl( XFRAME_OPTIONS_FILTER_CLASSNAME ).params( params );
}
+
+ // HTTP Strict-Transport-Security
+ params = new ArrayList<FilterParamDescriptor>();
+ String strictTranportEnabled = map.get(STRICT_TRANSPORT_ENABLED);
+ if ( strictTranportEnabled != null && "true".equals(strictTranportEnabled)) {
+ provisionConfig(resource, providerParams, params, "strict.");
+ resource.addFilter().name( getName() + STRICT_TRANSPORT_SUFFIX).role( getRole() ).impl(STRICT_TRANSPORT_FILTER_CLASSNAME).params( params );
+ }
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/b60322a6/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
new file mode 100644
index 0000000..28ac18a
--- /dev/null
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/StrictTranportFilter.java
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.webappsec.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+/**
+ * This filter protects proxied webapps from protocol downgrade attacks
+ * and cookie hijacking.
+ */
+public class StrictTranportFilter implements Filter {
+ private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
+ private static final String CUSTOM_HEADER_PARAM = "strict.transport";
+
+ private String option = "max-age=31536000";
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#destroy()
+ */
+ @Override
+ public void destroy() {
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
+ */
+ @Override
+ public void doFilter(ServletRequest req, ServletResponse res,
+ FilterChain chain) throws IOException, ServletException {
+ ((HttpServletResponse) res).setHeader(STRICT_TRANSPORT, option);
+ chain.doFilter(req, new StrictTranportResponseWrapper((HttpServletResponse) res));
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
+ */
+ @Override
+ public void init(FilterConfig config) throws ServletException {
+ String customOption = config.getInitParameter(CUSTOM_HEADER_PARAM);
+ if (customOption != null) {
+ option = customOption;
+ }
+ }
+
+ public class StrictTranportResponseWrapper extends HttpServletResponseWrapper {
+ @Override
+ public void addHeader(String name, String value) {
+ // don't allow additional values to be added to
+ // the configured options value in topology
+ if (!name.equals(STRICT_TRANSPORT)) {
+ super.addHeader(name, value);
+ }
+ }
+
+ @Override
+ public void setHeader(String name, String value) {
+ // don't allow overwriting of configured value
+ if (!name.equals(STRICT_TRANSPORT)) {
+ super.setHeader(name, value);
+ }
+ }
+
+ /**
+ * Construct a wrapper for this response.
+ *
+ * @param response the response to wrap
+ */
+ public StrictTranportResponseWrapper(HttpServletResponse response) {
+ super(response);
+ }
+
+ @Override
+ public String getHeader(String name) {
+ String headerValue = null;
+ if (name.equals(STRICT_TRANSPORT)) {
+ headerValue = option;
+ }
+ else {
+ headerValue = super.getHeader(name);
+ }
+ return headerValue;
+ }
+
+ /**
+ * get the Header names
+ */
+ @Override
+ public Collection<String> getHeaderNames() {
+ List<String> names = (List<String>) super.getHeaderNames();
+ if (names == null) {
+ names = new ArrayList<String>();
+ }
+ names.add(STRICT_TRANSPORT);
+ return names;
+ }
+
+ @Override
+ public Collection<String> getHeaders(String name) {
+ List<String> values = (List<String>) super.getHeaders(name);
+ if (name.equals(STRICT_TRANSPORT)) {
+ if (values == null) {
+ values = new ArrayList<String>();
+ }
+ values.add(option);
+ }
+ return values;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/b60322a6/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java b/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
new file mode 100644
index 0000000..0c63d7f
--- /dev/null
+++ b/gateway-provider-security-webappsec/src/test/java/org/apache/hadoop/gateway/webappsec/StrictTranportFilterTest.java
@@ -0,0 +1,164 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.webappsec;
+
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Properties;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.gateway.webappsec.filter.StrictTranportFilter;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ *
+ */
+public class StrictTranportFilterTest {
+ /**
+ *
+ */
+ private static final String STRICT_TRANSPORT = "Strict-Transport-Security";
+ String options = null;
+ Collection<String> headerNames = null;
+ Collection<String> headers = null;
+
+ @Test
+ public void testDefaultOptionsValue() throws Exception {
+ try {
+ StrictTranportFilter filter = new StrictTranportFilter();
+ Properties props = new Properties();
+ props.put("strict.transport.enabled", "true");
+ filter.init(new TestFilterConfig(props));
+
+ HttpServletRequest request = EasyMock.createNiceMock(
+ HttpServletRequest.class);
+ HttpServletResponse response = EasyMock.createNiceMock(
+ HttpServletResponse.class);
+ EasyMock.replay(request);
+ EasyMock.replay(response);
+
+ TestFilterChain chain = new TestFilterChain();
+ filter.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.",
+ chain.doFilterCalled );
+ Assert.assertTrue("Options value incorrect should be max-age=31536000 but is: "
+ + options, "max-age=31536000".equals(options));
+
+ Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testConfiguredOptionsValue() throws Exception {
+ try {
+ StrictTranportFilter filter = new StrictTranportFilter();
+ Properties props = new Properties();
+ props.put("strict.transport.enabled", "true");
+ props.put("strict.transport", "max-age=31536010; includeSubDomains");
+ filter.init(new TestFilterConfig(props));
+
+ HttpServletRequest request = EasyMock.createNiceMock(
+ HttpServletRequest.class);
+ HttpServletResponse response = EasyMock.createNiceMock(
+ HttpServletResponse.class);
+ EasyMock.replay(request);
+ EasyMock.replay(response);
+
+ TestFilterChain chain = new TestFilterChain();
+ filter.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.",
+ chain.doFilterCalled );
+ Assert.assertTrue("Options value incorrect should be max-age=31536010; includeSubDomains but is: "
+ + options, "max-age=31536010; includeSubDomains".equals(options));
+
+ Assert.assertTrue("Strict-Transport-Security count not equal to 1.", headers.size() == 1);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ class TestFilterConfig implements FilterConfig {
+ Properties props = null;
+
+ public TestFilterConfig(Properties props) {
+ this.props = props;
+ }
+
+ @Override
+ public String getFilterName() {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getServletContext()
+ */
+ @Override
+ public ServletContext getServletContext() {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameter(java.lang.String)
+ */
+ @Override
+ public String getInitParameter(String name) {
+ return props.getProperty(name, null);
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameterNames()
+ */
+ @Override
+ public Enumeration<String> getInitParameterNames() {
+ return null;
+ }
+
+ }
+
+ class TestFilterChain implements FilterChain {
+ boolean doFilterCalled = false;
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterChain#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse)
+ */
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response)
+ throws IOException, ServletException {
+ doFilterCalled = true;
+ options = ((HttpServletResponse)response).getHeader(STRICT_TRANSPORT);
+ headerNames = ((HttpServletResponse)response).getHeaderNames();
+ headers = ((HttpServletResponse)response).getHeaders(STRICT_TRANSPORT);
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/b60322a6/gateway-release/home/conf/topologies/manager.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/conf/topologies/manager.xml b/gateway-release/home/conf/topologies/manager.xml
index 22f8a91..08416c3 100644
--- a/gateway-release/home/conf/topologies/manager.xml
+++ b/gateway-release/home/conf/topologies/manager.xml
@@ -27,6 +27,7 @@
<param><name>csrf.customHeader</name><value>X-XSRF-Header</value></param>
<param><name>csrf.methodsToIgnore</name><value>GET,OPTIONS,HEAD</value></param>
<param><name>xframe.options.enabled</name><value>true</value></param>
+ <param><name>strict.transport.enabled</name><value>true</value></param>
</provider>
<provider>
http://git-wip-us.apache.org/repos/asf/knox/blob/b60322a6/gateway-release/home/templates/sandbox-apps.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/templates/sandbox-apps.xml b/gateway-release/home/templates/sandbox-apps.xml
index bed6470..44cb5f2 100644
--- a/gateway-release/home/templates/sandbox-apps.xml
+++ b/gateway-release/home/templates/sandbox-apps.xml
@@ -22,6 +22,7 @@
<enabled>true</enabled>
<param><name>xframe.options.enabled</name><value>true</value></param>
<param><name>csrf.enabled</name><value>true</value></param>
+ <param><name>strict.transport.enabled</name><value>true</value></param>
</provider>
<gateway>
[10/25] knox git commit: KNOX-1039 - Added admin APIs for managing
shared provider configurations and descriptors (Phil Zampino via Sandeep
More)
Posted by mo...@apache.org.
KNOX-1039 - Added admin APIs for managing shared provider configurations and descriptors (Phil Zampino via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/9ad9bcdb
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/9ad9bcdb
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/9ad9bcdb
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 9ad9bcdbbdb82acdabd05fe1500da9a6f8d22634
Parents: 41952dd
Author: Sandeep More <mo...@apache.org>
Authored: Thu Oct 26 13:20:35 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Oct 26 13:20:35 2017 -0400
----------------------------------------------------------------------
.../ambari/AmbariServiceDiscovery.java | 3 +-
.../apache/hadoop/gateway/GatewayMessages.java | 34 +-
.../gateway/config/impl/GatewayConfigImpl.java | 3 +-
.../topology/impl/DefaultTopologyService.java | 221 +++++--
.../topology/DefaultTopologyServiceTest.java | 402 +++++++++++--
.../topology/file/provider-config-one.xml | 74 +++
.../topology/file/simple-descriptor-five.json | 14 +
.../topology/file/simple-descriptor-six.json | 18 +
.../service/admin/HrefListingMarshaller.java | 75 +++
.../service/admin/TopologiesResource.java | 379 +++++++++++-
.../hadoop/gateway/i18n/GatewaySpiMessages.java | 10 +-
.../services/topology/TopologyService.java | 33 +-
.../gateway/GatewayAdminTopologyFuncTest.java | 586 +++++++++++++++++++
13 files changed, 1736 insertions(+), 116 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index 37f68ae..b7f9f53 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -247,10 +247,11 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
}
if (aliasService != null) {
- // If not password alias is configured, then try the default alias
+ // If no password alias is configured, then try the default alias
if (passwordAlias == null) {
passwordAlias = DEFAULT_PWD_ALIAS;
}
+
try {
char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
if (pwd != null) {
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 6f73c1e..4cb4c40 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -514,8 +514,40 @@ public interface GatewayMessages {
void topologyPortMappingCannotFindTopology(final String topology, final int port);
+ @Message( level = MessageLevel.INFO, text = "Monitoring simple descriptors in directory: {0}" )
+ void monitoringDescriptorChangesInDirectory(String descriptorsDir);
+
+
+ @Message( level = MessageLevel.INFO, text = "Monitoring shared provider configurations in directory: {0}" )
+ void monitoringProviderConfigChangesInDirectory(String sharedProviderDir);
+
+ @Message( level = MessageLevel.INFO, text = "Prevented deletion of shared provider configuration because there are referencing descriptors: {0}" )
+ void preventedDeletionOfSharedProviderConfiguration(String providerConfigurationPath);
+
+ @Message( level = MessageLevel.INFO, text = "Generated topology {0} because the associated descriptor {1} changed." )
+ void generatedTopologyForDescriptorChange(String topologyName, String descriptorName);
+
@Message( level = MessageLevel.ERROR, text = "An error occurred while processing {0} : {1}" )
void simpleDescriptorHandlingError(final String simpleDesc,
- @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+ @StackTrace(level = MessageLevel.DEBUG) Exception e);
+
+ @Message(level = MessageLevel.DEBUG, text = "Successfully wrote configuration: {0}")
+ void wroteConfigurationFile(final String filePath);
+
+ @Message(level = MessageLevel.ERROR, text = "Failed to write configuration: {0}")
+ void failedToWriteConfigurationFile(final String filePath,
+ @StackTrace(level = MessageLevel.DEBUG) Exception e );
+
+ @Message( level = MessageLevel.INFO, text = "Deleting topology {0} because the associated descriptor {1} was deleted." )
+ void deletingTopologyForDescriptorDeletion(String topologyName, String descriptorName);
+
+ @Message( level = MessageLevel.INFO, text = "Deleting descriptor {0} because the associated topology {1} was deleted." )
+ void deletingDescriptorForTopologyDeletion(String descriptorName, String topologyName);
+
+ @Message( level = MessageLevel.DEBUG, text = "Added descriptor {0} reference to provider configuration {1}." )
+ void addedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
+
+ @Message( level = MessageLevel.DEBUG, text = "Removed descriptor {0} reference to provider configuration {1}." )
+ void removedProviderConfigurationReference(String descriptorName, String providerConfigurationName);
}
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 0956a4a..4202a18 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.gateway.config.impl;
+import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -250,7 +251,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
@Override
public String getGatewayConfDir() {
String value = getVar( GATEWAY_CONF_HOME_VAR, getGatewayHomeDir() + File.separator + "conf" );
- return value;
+ return FilenameUtils.normalize(value);
}
@Override
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
index 13e1a3d..39e8029 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/topology/impl/DefaultTopologyService.java
@@ -62,6 +62,7 @@ import java.io.FileFilter;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -91,8 +92,11 @@ public class DefaultTopologyService
private static DigesterLoader digesterLoader = newLoader(new KnoxFormatXmlTopologyRules(), new AmbariFormatXmlTopologyRules());
private List<FileAlterationMonitor> monitors = new ArrayList<>();
private File topologiesDirectory;
+ private File sharedProvidersDirectory;
private File descriptorsDirectory;
+ private DescriptorsMonitor descriptorsMonitor;
+
private Set<TopologyListener> listeners;
private volatile Map<File, Topology> topologies;
private AliasService aliasService;
@@ -211,8 +215,7 @@ public class DefaultTopologyService
}
private File calculateAbsoluteTopologiesDir(GatewayConfig config) {
- String normalizedTopologyDir = FilenameUtils.normalize(config.getGatewayTopologyDir());
- File topoDir = new File(normalizedTopologyDir);
+ File topoDir = new File(config.getGatewayTopologyDir());
topoDir = topoDir.getAbsoluteFile();
return topoDir;
}
@@ -220,15 +223,10 @@ public class DefaultTopologyService
private File calculateAbsoluteConfigDir(GatewayConfig config) {
File configDir = null;
- String path = FilenameUtils.normalize(config.getGatewayConfDir());
- if (path != null) {
- configDir = new File(config.getGatewayConfDir());
- } else {
- configDir = (new File(config.getGatewayTopologyDir())).getParentFile();
- }
- configDir = configDir.getAbsoluteFile();
+ String path = config.getGatewayConfDir();
+ configDir = (path != null) ? new File(path) : (new File(config.getGatewayTopologyDir())).getParentFile();
- return configDir;
+ return configDir.getAbsoluteFile();
}
private void initListener(FileAlterationMonitor monitor,
@@ -250,31 +248,34 @@ public class DefaultTopologyService
private Map<File, Topology> loadTopologies(File directory) {
Map<File, Topology> map = new HashMap<>();
if (directory.isDirectory() && directory.canRead()) {
- for (File file : directory.listFiles(this)) {
- try {
- Topology loadTopology = loadTopology(file);
- if (null != loadTopology) {
- map.put(file, loadTopology);
- } else {
+ File[] existingTopologies = directory.listFiles(this);
+ if (existingTopologies != null) {
+ for (File file : existingTopologies) {
+ try {
+ Topology loadTopology = loadTopology(file);
+ if (null != loadTopology) {
+ map.put(file, loadTopology);
+ } else {
+ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
+ ActionOutcome.FAILURE);
+ log.failedToLoadTopology(file.getAbsolutePath());
+ }
+ } catch (IOException e) {
+ // Maybe it makes sense to throw exception
auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath());
+ ActionOutcome.FAILURE);
+ log.failedToLoadTopology(file.getAbsolutePath(), e);
+ } catch (SAXException e) {
+ // Maybe it makes sense to throw exception
+ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
+ ActionOutcome.FAILURE);
+ log.failedToLoadTopology(file.getAbsolutePath(), e);
+ } catch (Exception e) {
+ // Maybe it makes sense to throw exception
+ auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
+ ActionOutcome.FAILURE);
+ log.failedToLoadTopology(file.getAbsolutePath(), e);
}
- } catch (IOException e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
- } catch (SAXException e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
- } catch (Exception e) {
- // Maybe it makes sense to throw exception
- auditor.audit(Action.LOAD, file.getAbsolutePath(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
- log.failedToLoadTopology(file.getAbsolutePath(), e);
}
}
}
@@ -356,8 +357,7 @@ public class DefaultTopologyService
File topoDir = topologiesDirectory;
if(topoDir.isDirectory() && topoDir.canRead()) {
- File[] results = topoDir.listFiles();
- for (File f : results) {
+ for (File f : listFiles(topoDir)) {
String fName = FilenameUtils.getBaseName(f.getName());
if(fName.equals(t.getName())) {
f.delete();
@@ -381,9 +381,9 @@ public class DefaultTopologyService
public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config) {
File tFile = null;
Map<String, List<String>> urls = new HashMap<>();
- if(topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
- for(File f : topologiesDirectory.listFiles()){
- if(FilenameUtils.removeExtension(f.getName()).equals(t.getName())){
+ if (topologiesDirectory.isDirectory() && topologiesDirectory.canRead()) {
+ for (File f : listFiles(topologiesDirectory)) {
+ if (FilenameUtils.removeExtension(f.getName()).equals(t.getName())) {
tFile = f;
}
}
@@ -405,6 +405,63 @@ public class DefaultTopologyService
}
@Override
+ public boolean deployProviderConfiguration(String name, String content) {
+ return writeConfig(sharedProvidersDirectory, name, content);
+ }
+
+ @Override
+ public Collection<File> getProviderConfigurations() {
+ List<File> providerConfigs = new ArrayList<>();
+ for (File providerConfig : listFiles(sharedProvidersDirectory)) {
+ if (SharedProviderConfigMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(providerConfig.getName()))) {
+ providerConfigs.add(providerConfig);
+ }
+ }
+ return providerConfigs;
+ }
+
+ @Override
+ public boolean deleteProviderConfiguration(String name) {
+ boolean result = false;
+
+ File providerConfig = getExistingFile(sharedProvidersDirectory, name);
+ if (providerConfig != null) {
+ List<String> references = descriptorsMonitor.getReferencingDescriptors(providerConfig.getAbsolutePath());
+ if (references.isEmpty()) {
+ result = providerConfig.delete();
+ } else {
+ log.preventedDeletionOfSharedProviderConfiguration(providerConfig.getAbsolutePath());
+ }
+ } else {
+ result = true; // If it already does NOT exist, then the delete effectively succeeded
+ }
+
+ return result;
+ }
+
+ @Override
+ public boolean deployDescriptor(String name, String content) {
+ return writeConfig(descriptorsDirectory, name, content);
+ }
+
+ @Override
+ public Collection<File> getDescriptors() {
+ List<File> descriptors = new ArrayList<>();
+ for (File descriptor : listFiles(descriptorsDirectory)) {
+ if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
+ descriptors.add(descriptor);
+ }
+ }
+ return descriptors;
+ }
+
+ @Override
+ public boolean deleteDescriptor(String name) {
+ File descriptor = getExistingFile(descriptorsDirectory, name);
+ return (descriptor == null) || descriptor.delete();
+ }
+
+ @Override
public void addTopologyChangeListener(TopologyListener listener) {
listeners.add(listener);
}
@@ -448,6 +505,7 @@ public class DefaultTopologyService
File simpleDesc =
new File(descriptorsDirectory, FilenameUtils.getBaseName(file.getName()) + "." + ext);
if (simpleDesc.exists()) {
+ log.deletingDescriptorForTopologyDeletion(simpleDesc.getName(), file.getName());
simpleDesc.delete();
}
}
@@ -481,20 +539,22 @@ public class DefaultTopologyService
File configDirectory = calculateAbsoluteConfigDir(config);
descriptorsDirectory = new File(configDirectory, "descriptors");
- File sharedProvidersDirectory = new File(configDirectory, "shared-providers");
+ sharedProvidersDirectory = new File(configDirectory, "shared-providers");
// Add support for conf/topologies
initListener(topologiesDirectory, this, this);
// Add support for conf/descriptors
- DescriptorsMonitor dm = new DescriptorsMonitor(topologiesDirectory, aliasService);
+ descriptorsMonitor = new DescriptorsMonitor(topologiesDirectory, aliasService);
initListener(descriptorsDirectory,
- dm,
- dm);
+ descriptorsMonitor,
+ descriptorsMonitor);
+ log.monitoringDescriptorChangesInDirectory(descriptorsDirectory.getAbsolutePath());
// Add support for conf/shared-providers
- SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(dm, descriptorsDirectory);
+ SharedProviderConfigMonitor spm = new SharedProviderConfigMonitor(descriptorsMonitor, descriptorsDirectory);
initListener(sharedProvidersDirectory, spm, spm);
+ log.monitoringProviderConfigChangesInDirectory(sharedProvidersDirectory.getAbsolutePath());
// For all the descriptors currently in the descriptors dir at start-up time, trigger topology generation.
// This happens prior to the start-up loading of the topologies.
@@ -502,7 +562,7 @@ public class DefaultTopologyService
if (descriptorFilenames != null) {
for (String descriptorFilename : descriptorFilenames) {
if (DescriptorsMonitor.isDescriptorFile(descriptorFilename)) {
- dm.onFileChange(new File(descriptorsDirectory, descriptorFilename));
+ descriptorsMonitor.onFileChange(new File(descriptorsDirectory, descriptorFilename));
}
}
}
@@ -514,6 +574,70 @@ public class DefaultTopologyService
/**
+ * Utility method for listing the files in the specified directory.
+ * This method is "nicer" than File#listFiles() because it will not return null.
+ *
+ * @param directory The directory whose files should be returned.
+ *
+ * @return A List of the Files in the directory.
+ */
+ private static List<File> listFiles(File directory) {
+ List<File> result = null;
+ File[] files = directory.listFiles();
+ if (files != null) {
+ result = Arrays.asList(files);
+ } else {
+ result = Collections.emptyList();
+ }
+ return result;
+ }
+
+ /**
+ * Search for a file in the specified directory whose base name (filename without extension) matches the
+ * specified basename.
+ *
+ * @param directory The directory in which to search.
+ * @param basename The basename of interest.
+ *
+ * @return The matching File, or null if no file in the directory has the specified basename
+ */
+ private static File getExistingFile(File directory, String basename) {
+ File match = null;
+ for (File file : listFiles(directory)) {
+ if (FilenameUtils.getBaseName(file.getName()).equals(basename)) {
+ match = file;
+ break;
+ }
+ }
+ return match;
+ }
+
+ /**
+ * Write the specified content to a file.
+ *
+ * @param dest The destination directory.
+ * @param name The name of the file.
+ * @param content The contents of the file.
+ *
+ * @return true, if the write succeeds; otherwise, false.
+ */
+ private static boolean writeConfig(File dest, String name, String content) {
+ boolean result = false;
+
+ File destFile = new File(dest, name);
+ try {
+ FileUtils.writeStringToFile(destFile, content);
+ log.wroteConfigurationFile(destFile.getAbsolutePath());
+ result = true;
+ } catch (IOException e) {
+ log.failedToWriteConfigurationFile(destFile.getAbsolutePath(), e);
+ }
+
+ return result;
+ }
+
+
+ /**
* Change handler for simple descriptors
*/
public static class DescriptorsMonitor extends FileAlterationListenerAdaptor
@@ -543,7 +667,7 @@ public class DefaultTopologyService
}
List<String> getReferencingDescriptors(String providerConfigPath) {
- List<String> result = providerConfigReferences.get(providerConfigPath);
+ List<String> result = providerConfigReferences.get(FilenameUtils.normalize(providerConfigPath));
if (result == null) {
result = Collections.emptyList();
}
@@ -562,6 +686,7 @@ public class DefaultTopologyService
File topologyFile =
new File(topologiesDir, FilenameUtils.getBaseName(file.getName()) + "." + ext);
if (topologyFile.exists()) {
+ log.deletingTopologyForDescriptorDeletion(topologyFile.getName(), file.getName());
topologyFile.delete();
}
}
@@ -574,8 +699,10 @@ public class DefaultTopologyService
break;
}
}
+
if (reference != null) {
providerConfigReferences.get(reference).remove(normalizedFilePath);
+ log.removedProviderConfigurationReference(normalizedFilePath, reference);
}
}
@@ -584,6 +711,7 @@ public class DefaultTopologyService
try {
// When a simple descriptor has been created or modified, generate the new topology descriptor
Map<String, File> result = SimpleDescriptorHandler.handle(file, topologiesDir, aliasService);
+ log.generatedTopologyForDescriptorChange(result.get("topology").getName(), file.getName());
// Add the provider config reference relationship for handling updates to the provider config
String providerConfig = FilenameUtils.normalize(result.get("reference").getAbsolutePath());
@@ -602,6 +730,7 @@ public class DefaultTopologyService
// Add the current reference relationship
refs.add(descriptorName);
+ log.addedProviderConfigurationReference(descriptorName, providerConfig);
}
} catch (Exception e) {
log.simpleDescriptorHandlingError(file.getName(), e);
@@ -662,7 +791,7 @@ public class DefaultTopologyService
private List<File> getReferencingDescriptors(File sharedProviderConfig) {
List<File> references = new ArrayList<>();
- for (File descriptor : descriptorsDir.listFiles()) {
+ for (File descriptor : listFiles(descriptorsDir)) {
if (DescriptorsMonitor.SUPPORTED_EXTENSIONS.contains(FilenameUtils.getExtension(descriptor.getName()))) {
for (String reference : descriptorsMonitor.getReferencingDescriptors(FilenameUtils.normalize(sharedProviderConfig.getAbsolutePath()))) {
references.add(new File(reference));
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
index 498d750..2357ad6 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/services/topology/DefaultTopologyServiceTest.java
@@ -18,16 +18,15 @@
package org.apache.hadoop.gateway.services.topology;
import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.monitor.FileAlterationListener;
import org.apache.commons.io.monitor.FileAlterationMonitor;
import org.apache.commons.io.monitor.FileAlterationObserver;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.services.security.AliasService;
import org.apache.hadoop.gateway.services.topology.impl.DefaultTopologyService;
import org.apache.hadoop.gateway.topology.*;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryConfig;
-import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
import org.apache.hadoop.test.TestUtils;
import org.easymock.EasyMock;
import org.junit.After;
@@ -38,13 +37,24 @@ import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.isA;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.core.IsNull.notNullValue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
@@ -86,12 +96,6 @@ public class DefaultTopologyServiceTest {
File dir = createDir();
File topologyDir = new File(dir, "topologies");
- File descriptorsDir = new File(dir, "descriptors");
- descriptorsDir.mkdirs();
-
- File sharedProvidersDir = new File(dir, "shared-providers");
- sharedProvidersDir.mkdirs();
-
long time = topologyDir.lastModified();
try {
createFile(topologyDir, "one.xml", "org/apache/hadoop/gateway/topology/file/topology-one.xml", time);
@@ -104,7 +108,7 @@ public class DefaultTopologyServiceTest {
GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
- EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
+ EasyMock.expect(config.getGatewayConfDir()).andReturn(topologyDir.getParentFile().getAbsolutePath()).anyTimes();
EasyMock.replay(config);
provider.init(config, c);
@@ -167,59 +171,371 @@ public class DefaultTopologyServiceTest {
assertThat(topology.getName(), is("one"));
assertThat(topology.getTimestamp(), is(time));
+ } finally {
+ FileUtils.deleteQuietly(dir);
+ }
+ }
+
+ /**
+ * KNOX-1014
+ *
+ * Test the lifecycle relationship between simple descriptors and topology files.
+ *
+ * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+ * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ */
+ @Test
+ public void testSimpleDescriptorsTopologyGeneration() throws Exception {
+
+ File dir = createDir();
+ File topologyDir = new File(dir, "topologies");
+ topologyDir.mkdirs();
+
+ File descriptorsDir = new File(dir, "descriptors");
+ descriptorsDir.mkdirs();
+
+ File sharedProvidersDir = new File(dir, "shared-providers");
+ sharedProvidersDir.mkdirs();
+
+ try {
+ TestTopologyListener topoListener = new TestTopologyListener();
+ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
+
+ TopologyService provider = new DefaultTopologyService();
+ Map<String, String> c = new HashMap<>();
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
+ EasyMock.replay(config);
+
+ provider.init(config, c);
+ provider.addTopologyChangeListener(topoListener);
+ provider.reloadTopologies();
+
+
// Add a simple descriptor to the descriptors dir to verify topology generation and loading (KNOX-1006)
- // N.B. This part of the test depends on the DummyServiceDiscovery extension being configured:
- // org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
AliasService aliasService = EasyMock.createNiceMock(AliasService.class);
EasyMock.expect(aliasService.getPasswordFromAliasForGateway(anyObject(String.class))).andReturn(null).anyTimes();
EasyMock.replay(aliasService);
DefaultTopologyService.DescriptorsMonitor dm =
- new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
+ new DefaultTopologyService.DescriptorsMonitor(topologyDir, aliasService);
+
+ // Listener to simulate the topologies directory monitor, to notice when a topology has been deleted
+ provider.addTopologyChangeListener(new TestTopologyDeleteListener((DefaultTopologyService)provider));
// Write out the referenced provider config first
File provCfgFile = createFile(sharedProvidersDir,
"ambari-cluster-policy.xml",
"org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
- 1L);
+ System.currentTimeMillis());
try {
// Create the simple descriptor in the descriptors dir
- File simpleDesc =
- createFile(descriptorsDir,
- "four.json",
- "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
- 1L);
+ File simpleDesc = createFile(descriptorsDir,
+ "four.json",
+ "org/apache/hadoop/gateway/topology/file/simple-topology-four.json",
+ System.currentTimeMillis());
// Trigger the topology generation by noticing the simple descriptor
dm.onFileChange(simpleDesc);
// Load the generated topology
provider.reloadTopologies();
+ Collection<Topology> topologies = provider.getTopologies();
+ assertThat(topologies.size(), is(1));
+ Iterator<Topology> iterator = topologies.iterator();
+ Topology topology = iterator.next();
+ assertThat("four", is(topology.getName()));
+ int serviceCount = topology.getServices().size();
+ assertEquals("Expected the same number of services as are declared in the simple descriptor.", 10, serviceCount);
+
+ // Overwrite the simple descriptor with a different set of services, and check that the changes are
+ // propagated to the associated topology
+ simpleDesc = createFile(descriptorsDir,
+ "four.json",
+ "org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json",
+ System.currentTimeMillis());
+ dm.onFileChange(simpleDesc);
+ provider.reloadTopologies();
+ topologies = provider.getTopologies();
+ topology = topologies.iterator().next();
+ assertNotEquals(serviceCount, topology.getServices().size());
+ assertEquals(6, topology.getServices().size());
+
+ // Delete the simple descriptor, and make sure that the associated topology file is deleted
+ simpleDesc.delete();
+ dm.onFileDelete(simpleDesc);
+ provider.reloadTopologies();
+ topologies = provider.getTopologies();
+ assertTrue(topologies.isEmpty());
+
+ // Delete a topology file, and make sure that the associated simple descriptor is deleted
+ // Overwrite the simple descriptor with a different set of services, and check that the changes are
+ // propagated to the associated topology
+ simpleDesc = createFile(descriptorsDir,
+ "deleteme.json",
+ "org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json",
+ System.currentTimeMillis());
+ dm.onFileChange(simpleDesc);
+ provider.reloadTopologies();
topologies = provider.getTopologies();
- assertThat(topologies.size(), is(2));
- names = new HashSet<>(Arrays.asList("one", "four"));
- iterator = topologies.iterator();
- topology = iterator.next();
- assertThat(names, hasItem(topology.getName()));
- names.remove(topology.getName());
- topology = iterator.next();
- assertThat(names, hasItem(topology.getName()));
- names.remove(topology.getName());
- assertThat(names.size(), is(0));
+ assertFalse(topologies.isEmpty());
+ topology = topologies.iterator().next();
+ assertEquals("deleteme", topology.getName());
+ File topologyFile = new File(topologyDir, topology.getName() + ".xml");
+ assertTrue(topologyFile.exists());
+ topologyFile.delete();
+ provider.reloadTopologies();
+ assertFalse("Simple descriptor should have been deleted because the associated topology was.",
+ simpleDesc.exists());
+
} finally {
provCfgFile.delete();
-
}
} finally {
FileUtils.deleteQuietly(dir);
}
}
+ /**
+ * KNOX-1014
+ *
+ * Test the lifecycle relationship between provider configuration files, simple descriptors, and topology files.
+ *
+ * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+ * org.apache.hadoop.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ */
+ @Test
+ public void testTopologiesUpdateFromProviderConfigChange() throws Exception {
+ File dir = createDir();
+ File topologyDir = new File(dir, "topologies");
+ topologyDir.mkdirs();
+
+ File descriptorsDir = new File(dir, "descriptors");
+ descriptorsDir.mkdirs();
+
+ File sharedProvidersDir = new File(dir, "shared-providers");
+ sharedProvidersDir.mkdirs();
+
+ try {
+ TestTopologyListener topoListener = new TestTopologyListener();
+ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
+
+ TopologyService ts = new DefaultTopologyService();
+ Map<String, String> c = new HashMap<>();
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
+ EasyMock.replay(config);
+
+ ts.init(config, c);
+ ts.addTopologyChangeListener(topoListener);
+ ts.reloadTopologies();
+
+ java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
+ dmField.setAccessible(true);
+ DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
+
+ // Write out the referenced provider configs first
+ createFile(sharedProvidersDir,
+ "provider-config-one.xml",
+ "org/apache/hadoop/gateway/topology/file/provider-config-one.xml",
+ System.currentTimeMillis());
+
+ // Create the simple descriptor, which depends on provider-config-one.xml
+ File simpleDesc = createFile(descriptorsDir,
+ "six.json",
+ "org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json",
+ System.currentTimeMillis());
+
+ // "Notice" the simple descriptor change, and generate a topology based on it
+ dm.onFileChange(simpleDesc);
+
+ // Load the generated topology
+ ts.reloadTopologies();
+ Collection<Topology> topologies = ts.getTopologies();
+ assertThat(topologies.size(), is(1));
+ Iterator<Topology> iterator = topologies.iterator();
+ Topology topology = iterator.next();
+ assertFalse("The Shiro provider is disabled in provider-config-one.xml",
+ topology.getProvider("authentication", "ShiroProvider").isEnabled());
+
+ // Overwrite the referenced provider configuration with a different ShiroProvider config, and check that the
+ // changes are propagated to the associated topology
+ File providerConfig = createFile(sharedProvidersDir,
+ "provider-config-one.xml",
+ "org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml",
+ System.currentTimeMillis());
+
+ // "Notice" the simple descriptor change as a result of the referenced config change
+ dm.onFileChange(simpleDesc);
+
+ // Load the generated topology
+ ts.reloadTopologies();
+ topologies = ts.getTopologies();
+ assertFalse(topologies.isEmpty());
+ topology = topologies.iterator().next();
+ assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
+ topology.getProvider("authentication", "ShiroProvider").isEnabled());
+
+ // Delete the provider configuration, and make sure that the associated topology file is unaffected.
+ // The topology file should not be affected because the simple descriptor handling will fail to resolve the
+ // referenced provider configuration.
+ providerConfig.delete(); // Delete the file
+ dm.onFileChange(simpleDesc); // The provider config deletion will trigger a descriptor change notification
+ ts.reloadTopologies();
+ topologies = ts.getTopologies();
+ assertFalse(topologies.isEmpty());
+ assertTrue("The Shiro provider is enabled in ambari-cluster-policy.xml",
+ topology.getProvider("authentication", "ShiroProvider").isEnabled());
+
+ } finally {
+ FileUtils.deleteQuietly(dir);
+ }
+ }
+
+ /**
+ * KNOX-1039
+ */
+ @Test
+ public void testConfigurationCRUDAPI() throws Exception {
+ File dir = createDir();
+ File topologyDir = new File(dir, "topologies");
+ topologyDir.mkdirs();
+
+ File descriptorsDir = new File(dir, "descriptors");
+ descriptorsDir.mkdirs();
+
+ File sharedProvidersDir = new File(dir, "shared-providers");
+ sharedProvidersDir.mkdirs();
+
+ try {
+ TestTopologyListener topoListener = new TestTopologyListener();
+ FileAlterationMonitor monitor = new FileAlterationMonitor(Long.MAX_VALUE);
+
+ TopologyService ts = new DefaultTopologyService();
+ Map<String, String> c = new HashMap<>();
+
+ GatewayConfig config = EasyMock.createNiceMock(GatewayConfig.class);
+ EasyMock.expect(config.getGatewayTopologyDir()).andReturn(topologyDir.getAbsolutePath()).anyTimes();
+ EasyMock.expect(config.getGatewayConfDir()).andReturn(descriptorsDir.getParentFile().getAbsolutePath()).anyTimes();
+ EasyMock.replay(config);
+
+ ts.init(config, c);
+ ts.addTopologyChangeListener(topoListener);
+ ts.reloadTopologies();
+
+ java.lang.reflect.Field dmField = ts.getClass().getDeclaredField("descriptorsMonitor");
+ dmField.setAccessible(true);
+ DefaultTopologyService.DescriptorsMonitor dm = (DefaultTopologyService.DescriptorsMonitor) dmField.get(ts);
+
+ final String simpleDescName = "six.json";
+ final String provConfOne = "provider-config-one.xml";
+ final String provConfTwo = "ambari-cluster-policy.xml";
+
+ // "Deploy" the referenced provider configs first
+ boolean isDeployed =
+ ts.deployProviderConfiguration(provConfOne,
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/provider-config-one.xml").toURI())));
+ assertTrue(isDeployed);
+ File provConfOneFile = new File(sharedProvidersDir, provConfOne);
+ assertTrue(provConfOneFile.exists());
+
+ isDeployed =
+ ts.deployProviderConfiguration(provConfTwo,
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/ambari-cluster-policy.xml").toURI())));
+ assertTrue(isDeployed);
+ File provConfTwoFile = new File(sharedProvidersDir, provConfTwo);
+ assertTrue(provConfTwoFile.exists());
+
+ // Validate the provider configurations known by the topology service
+ Collection<File> providerConfigurations = ts.getProviderConfigurations();
+ assertNotNull(providerConfigurations);
+ assertEquals(2, providerConfigurations.size());
+ assertTrue(providerConfigurations.contains(provConfOneFile));
+ assertTrue(providerConfigurations.contains(provConfTwoFile));
+
+ // "Deploy" the simple descriptor, which depends on provConfOne
+ isDeployed =
+ ts.deployDescriptor(simpleDescName,
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json").toURI())));
+ assertTrue(isDeployed);
+ File simpleDesc = new File(descriptorsDir, simpleDescName);
+ assertTrue(simpleDesc.exists());
+
+ // Validate the simple descriptors known by the topology service
+ Collection<File> descriptors = ts.getDescriptors();
+ assertNotNull(descriptors);
+ assertEquals(1, descriptors.size());
+ assertTrue(descriptors.contains(simpleDesc));
+
+ // "Notice" the simple descriptor, so the provider configuration dependency relationship is recorded
+ dm.onFileChange(simpleDesc);
+
+ // Attempt to delete the referenced provConfOne
+ assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
+ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
+
+ // Overwrite the simple descriptor with content that changes the provider config reference to provConfTwo
+ isDeployed =
+ ts.deployDescriptor(simpleDescName,
+ FileUtils.readFileToString(new File(ClassLoader.getSystemResource("org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json").toURI())));
+ assertTrue(isDeployed);
+ assertTrue(simpleDesc.exists());
+ ts.getProviderConfigurations();
+
+ // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
+ dm.onFileChange(simpleDesc);
+
+ // Attempt to delete the referenced provConfOne
+ assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
+ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfOne)));
+
+ // Re-validate the provider configurations known by the topology service
+ providerConfigurations = ts.getProviderConfigurations();
+ assertNotNull(providerConfigurations);
+ assertEquals(1, providerConfigurations.size());
+ assertFalse(providerConfigurations.contains(provConfOneFile));
+ assertTrue(providerConfigurations.contains(provConfTwoFile));
+
+ // Attempt to delete the referenced provConfTwo
+ assertFalse("Should not be able to delete a provider configuration that is referenced by one or more descriptors",
+ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
+
+ // Delete the referencing simple descriptor
+ assertTrue(ts.deleteDescriptor(FilenameUtils.getBaseName(simpleDescName)));
+ assertFalse(simpleDesc.exists());
+
+ // Re-validate the simple descriptors known by the topology service
+ descriptors = ts.getDescriptors();
+ assertNotNull(descriptors);
+ assertTrue(descriptors.isEmpty());
+
+ // "Notice" the simple descriptor, so the provider configuration dependency relationship is updated
+ dm.onFileDelete(simpleDesc);
+
+ // Attempt to delete the referenced provConfTwo
+ assertTrue("Should be able to delete the provider configuration, now that it's not referenced by any descriptors",
+ ts.deleteProviderConfiguration(FilenameUtils.getBaseName(provConfTwo)));
+
+ // Re-validate the provider configurations known by the topology service
+ providerConfigurations = ts.getProviderConfigurations();
+ assertNotNull(providerConfigurations);
+ assertTrue(providerConfigurations.isEmpty());
+
+ } finally {
+ FileUtils.deleteQuietly(dir);
+ }
+ }
+
private void kickMonitor(FileAlterationMonitor monitor) {
for (FileAlterationObserver observer : monitor.getObservers()) {
observer.checkAndNotify();
}
}
+
@Test
public void testProviderParamsOrderIsPreserved() {
@@ -252,7 +568,7 @@ public class DefaultTopologyServiceTest {
private class TestTopologyListener implements TopologyListener {
- public ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
+ ArrayList<List<TopologyEvent>> events = new ArrayList<List<TopologyEvent>>();
@Override
public void handleTopologyEvent(List<TopologyEvent> events) {
@@ -261,4 +577,24 @@ public class DefaultTopologyServiceTest {
}
+
+ private class TestTopologyDeleteListener implements TopologyListener {
+
+ FileAlterationListener delegate;
+
+ TestTopologyDeleteListener(FileAlterationListener delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public void handleTopologyEvent(List<TopologyEvent> events) {
+ for (TopologyEvent event : events) {
+ if (event.getType().equals(TopologyEvent.Type.DELETED)) {
+ delegate.onFileDelete(new File(event.getTopology().getUri()));
+ }
+ }
+ }
+
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
new file mode 100644
index 0000000..95465a4
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/provider-config-one.xml
@@ -0,0 +1,74 @@
+<gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>false</enabled>
+ <param>
+ <!--
+ Session timeout in minutes; this is really an idle timeout.
+ It defaults to 30 minutes if the property value is not defined.
+ The current client authentication will expire if the client idles continuously for longer than this value.
+ -->
+ <name>sessionTimeout</name>
+ <value>30</value>
+ </param>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapContextFactory</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory</name>
+ <value>$ldapContextFactory</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>ldap://localhost:33389</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+
+ <!--
+ Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.
+ For example, a Hadoop service running in AWS may return a response that includes URLs containing
+ some AWS-internal host name. If the client needs to make a subsequent request to the host identified
+ in those URLs they need to be mapped to external host names that the client Knox can use to connect.
+
+ If the external and internal host names are the same, turn off this provider by setting the value of
+ the enabled parameter to false.
+
+ The name parameter specifies the external host names in a comma separated list.
+ The value parameter specifies corresponding internal host names in a comma separated list.
+
+ Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in the
+ out-of-the-box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the
+ Hadoop services using localhost. In real clusters, external host names would almost never be localhost.
+ -->
+ <provider>
+ <role>hostmap</role>
+ <name>static</name>
+ <enabled>true</enabled>
+ <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
+ </provider>
+
+</gateway>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
new file mode 100644
index 0000000..52cec35
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-five.json
@@ -0,0 +1,14 @@
+{
+ "discovery-type":"DUMMY",
+ "discovery-address":"http://c6401.ambari.apache.org:8080",
+ "provider-config-ref":"../shared-providers/ambari-cluster-policy.xml",
+ "cluster":"dummy",
+ "services":[
+ {"name":"NAMENODE"},
+ {"name":"JOBTRACKER"},
+ {"name":"WEBHDFS"},
+ {"name":"OOZIE"},
+ {"name":"HIVE"},
+ {"name":"RESOURCEMANAGER"}
+ ]
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
new file mode 100644
index 0000000..e78f193
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/file/simple-descriptor-six.json
@@ -0,0 +1,18 @@
+{
+ "discovery-type":"DUMMY",
+ "discovery-address":"http://c6401.ambari.apache.org:8080",
+ "provider-config-ref":"../shared-providers/provider-config-one.xml",
+ "cluster":"dummy",
+ "services":[
+ {"name":"NAMENODE"},
+ {"name":"JOBTRACKER"},
+ {"name":"WEBHDFS"},
+ {"name":"WEBHCAT"},
+ {"name":"OOZIE"},
+ {"name":"WEBHBASE"},
+ {"name":"HIVE"},
+ {"name":"RESOURCEMANAGER"},
+ {"name":"AMBARI", "urls":["http://c6401.ambari.apache.org:8080"]},
+ {"name":"AMBARIUI", "urls":["http://c6401.ambari.apache.org:8080"]}
+ ]
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
new file mode 100644
index 0000000..c251213
--- /dev/null
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/HrefListingMarshaller.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.service.admin;
+
+import org.eclipse.persistence.jaxb.JAXBContextProperties;
+
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.ext.MessageBodyWriter;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.lang.annotation.Annotation;
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+
+@Provider
+@Produces({MediaType.APPLICATION_JSON})
+public class HrefListingMarshaller implements MessageBodyWriter<TopologiesResource.HrefListing> {
+
+ @Override
+ public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
+ return (TopologiesResource.HrefListing.class == type);
+ }
+
+ @Override
+ public long getSize(TopologiesResource.HrefListing instance,
+ Class<?> type,
+ Type genericType,
+ Annotation[] annotations,
+ MediaType mediaType) {
+ return -1;
+ }
+
+ @Override
+ public void writeTo(TopologiesResource.HrefListing instance,
+ Class<?> type,
+ Type genericType,
+ Annotation[] annotations,
+ MediaType mediaType,
+ MultivaluedMap<String, Object> httpHeaders,
+ OutputStream entityStream) throws IOException, WebApplicationException {
+ try {
+ Map<String, Object> properties = new HashMap<>(1);
+ properties.put( JAXBContextProperties.MEDIA_TYPE, mediaType.toString());
+ JAXBContext context = JAXBContext.newInstance(new Class[]{TopologiesResource.HrefListing.class}, properties);
+ Marshaller m = context.createMarshaller();
+ m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
+ m.marshal(instance, entityStream);
+ } catch (JAXBException e) {
+ throw new IOException(e);
+ }
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
index 1504eca..28573bf 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/TopologiesResource.java
@@ -17,6 +17,11 @@
*/
package org.apache.hadoop.gateway.service.admin;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.hadoop.gateway.i18n.GatewaySpiMessages;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.service.admin.beans.BeanConverter;
import org.apache.hadoop.gateway.service.admin.beans.Topology;
import org.apache.hadoop.gateway.services.GatewayServices;
@@ -37,25 +42,47 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
+import java.io.File;
+import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_XML;
+import static javax.ws.rs.core.MediaType.TEXT_PLAIN;
+
import static javax.ws.rs.core.Response.ok;
+import static javax.ws.rs.core.Response.created;
+import static javax.ws.rs.core.Response.notModified;
+import static javax.ws.rs.core.Response.status;
+
@Path("/api/v1")
public class TopologiesResource {
+
+ private static final String XML_EXT = ".xml";
+ private static final String JSON_EXT = ".json";
+
+ private static final String TOPOLOGIES_API_PATH = "topologies";
+ private static final String SINGLE_TOPOLOGY_API_PATH = TOPOLOGIES_API_PATH + "/{id}";
+ private static final String PROVIDERCONFIG_API_PATH = "providerconfig";
+ private static final String SINGLE_PROVIDERCONFIG_API_PATH = PROVIDERCONFIG_API_PATH + "/{name}";
+ private static final String DESCRIPTORS_API_PATH = "descriptors";
+ private static final String SINGLE_DESCRIPTOR_API_PATH = DESCRIPTORS_API_PATH + "/{name}";
+
+ private static GatewaySpiMessages log = MessagesFactory.get(GatewaySpiMessages.class);
+
@Context
private HttpServletRequest request;
@GET
@Produces({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies/{id}")
+ @Path(SINGLE_TOPOLOGY_API_PATH)
public Topology getTopology(@PathParam("id") String id) {
GatewayServices services = (GatewayServices) request.getServletContext()
.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
@@ -78,7 +105,7 @@ public class TopologiesResource {
@GET
@Produces({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies")
+ @Path(TOPOLOGIES_API_PATH)
public SimpleTopologyWrapper getTopologies() {
GatewayServices services = (GatewayServices) request.getServletContext()
.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
@@ -106,7 +133,7 @@ public class TopologiesResource {
@PUT
@Consumes({APPLICATION_JSON, APPLICATION_XML})
- @Path("topologies/{id}")
+ @Path(SINGLE_TOPOLOGY_API_PATH)
public Topology uploadTopology(@PathParam("id") String id, Topology t) {
GatewayServices gs = (GatewayServices) request.getServletContext()
@@ -122,7 +149,7 @@ public class TopologiesResource {
@DELETE
@Produces(APPLICATION_JSON)
- @Path("topologies/{id}")
+ @Path(SINGLE_TOPOLOGY_API_PATH)
public Response deleteTopology(@PathParam("id") String id) {
boolean deleted = false;
if(!"admin".equals(id)) {
@@ -143,6 +170,244 @@ public class TopologiesResource {
return ok().entity("{ \"deleted\" : " + deleted + " }").build();
}
+ @GET
+ @Produces({APPLICATION_JSON})
+ @Path(PROVIDERCONFIG_API_PATH)
+ public HrefListing getProviderConfigurations() {
+ HrefListing listing = new HrefListing();
+ listing.setHref(buildHref(request));
+
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ List<HrefListItem> configs = new ArrayList<>();
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+ // Get all the simple descriptor file names
+ for (File providerConfig : ts.getProviderConfigurations()){
+ String id = FilenameUtils.getBaseName(providerConfig.getName());
+ configs.add(new HrefListItem(buildHref(id, request), providerConfig.getName()));
+ }
+
+ listing.setItems(configs);
+ return listing;
+ }
+
+ @GET
+ @Produces({APPLICATION_XML})
+ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
+ public Response getProviderConfiguration(@PathParam("name") String name) {
+ Response response;
+
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ File providerConfigFile = null;
+
+ for (File pc : ts.getProviderConfigurations()){
+ // If the file name matches the specified id
+ if (FilenameUtils.getBaseName(pc.getName()).equals(name)) {
+ providerConfigFile = pc;
+ break;
+ }
+ }
+
+ if (providerConfigFile != null) {
+ byte[] content = null;
+ try {
+ content = FileUtils.readFileToByteArray(providerConfigFile);
+ response = ok().entity(content).build();
+ } catch (IOException e) {
+ log.failedToReadConfigurationFile(providerConfigFile.getAbsolutePath(), e);
+ response = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
+ }
+
+ } else {
+ response = Response.status(Response.Status.NOT_FOUND).build();
+ }
+ return response;
+ }
+
+ @DELETE
+ @Produces(APPLICATION_JSON)
+ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
+ public Response deleteProviderConfiguration(@PathParam("name") String name) {
+ Response response;
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+ if (ts.deleteProviderConfiguration(name)) {
+ response = ok().entity("{ \"deleted\" : \"provider config " + name + "\" }").build();
+ } else {
+ response = notModified().build();
+ }
+ return response;
+ }
+
+
+ @DELETE
+ @Produces(APPLICATION_JSON)
+ @Path(SINGLE_DESCRIPTOR_API_PATH)
+ public Response deleteSimpleDescriptor(@PathParam("name") String name) {
+ Response response = null;
+ if(!"admin".equals(name)) {
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+ if (ts.deleteDescriptor(name)) {
+ response = ok().entity("{ \"deleted\" : \"descriptor " + name + "\" }").build();
+ }
+ }
+
+ if (response == null) {
+ response = notModified().build();
+ }
+
+ return response;
+ }
+
+
+ @PUT
+ @Consumes({APPLICATION_XML})
+ @Path(SINGLE_PROVIDERCONFIG_API_PATH)
+ public Response uploadProviderConfiguration(@PathParam("name") String name, String content) {
+ Response response = null;
+
+ GatewayServices gs =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ boolean isUpdate = configFileExists(ts.getProviderConfigurations(), name);
+
+ String filename = name.endsWith(XML_EXT) ? name : name + XML_EXT;
+ if (ts.deployProviderConfiguration(filename, content)) {
+ try {
+ if (isUpdate) {
+ response = Response.noContent().build();
+ } else{
+ response = created(new URI(buildHref(request))).build();
+ }
+ } catch (URISyntaxException e) {
+ log.invalidResourceURI(e.getInput(), e.getReason(), e);
+ response = status(Response.Status.BAD_REQUEST).entity("{ \"error\" : \"Failed to deploy provider configuration " + name + "\" }").build();
+ }
+ }
+
+ return response;
+ }
+
+
+ private boolean configFileExists(Collection<File> existing, String candidateName) {
+ boolean result = false;
+ for (File exists : existing) {
+ if (FilenameUtils.getBaseName(exists.getName()).equals(candidateName)) {
+ result = true;
+ break;
+ }
+ }
+ return result;
+ }
+
+
+ @PUT
+ @Consumes({APPLICATION_JSON})
+ @Path(SINGLE_DESCRIPTOR_API_PATH)
+ public Response uploadSimpleDescriptor(@PathParam("name") String name, String content) {
+ Response response = null;
+
+ GatewayServices gs =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ boolean isUpdate = configFileExists(ts.getDescriptors(), name);
+
+ String filename = name.endsWith(JSON_EXT) ? name : name + JSON_EXT;
+ if (ts.deployDescriptor(filename, content)) {
+ try {
+ if (isUpdate) {
+ response = Response.noContent().build();
+ } else {
+ response = created(new URI(buildHref(request))).build();
+ }
+ } catch (URISyntaxException e) {
+ log.invalidResourceURI(e.getInput(), e.getReason(), e);
+ response = status(Response.Status.BAD_REQUEST).entity("{ \"error\" : \"Failed to deploy descriptor " + name + "\" }").build();
+ }
+ }
+
+ return response;
+ }
+
+
+ @GET
+ @Produces({APPLICATION_JSON})
+ @Path(DESCRIPTORS_API_PATH)
+ public HrefListing getSimpleDescriptors() {
+ HrefListing listing = new HrefListing();
+ listing.setHref(buildHref(request));
+
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ List<HrefListItem> descriptors = new ArrayList<>();
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+ for (File descriptor : ts.getDescriptors()){
+ String id = FilenameUtils.getBaseName(descriptor.getName());
+ descriptors.add(new HrefListItem(buildHref(id, request), descriptor.getName()));
+ }
+
+ listing.setItems(descriptors);
+ return listing;
+ }
+
+
+ @GET
+ @Produces({APPLICATION_JSON, TEXT_PLAIN})
+ @Path(SINGLE_DESCRIPTOR_API_PATH)
+ public Response getSimpleDescriptor(@PathParam("name") String name) {
+ Response response;
+
+ GatewayServices services =
+ (GatewayServices) request.getServletContext().getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ TopologyService ts = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ File descriptorFile = null;
+
+ for (File sd : ts.getDescriptors()){
+ // If the file name matches the specified id
+ if (FilenameUtils.getBaseName(sd.getName()).equals(name)) {
+ descriptorFile = sd;
+ break;
+ }
+ }
+
+ if (descriptorFile != null) {
+ String mediaType = APPLICATION_JSON;
+
+ byte[] content = null;
+ try {
+ if ("yml".equals(FilenameUtils.getExtension(descriptorFile.getName()))) {
+ mediaType = TEXT_PLAIN;
+ }
+ content = FileUtils.readFileToByteArray(descriptorFile);
+ response = ok().type(mediaType).entity(content).build();
+ } catch (IOException e) {
+ log.failedToReadConfigurationFile(descriptorFile.getAbsolutePath(), e);
+ response = Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
+ }
+ } else {
+ response = Response.status(Response.Status.NOT_FOUND).build();
+ }
+
+ return response;
+ }
+
private static class TopologyComparator implements Comparator<SimpleTopology> {
@Override
@@ -151,13 +416,14 @@ public class TopologiesResource {
}
}
- String buildURI(org.apache.hadoop.gateway.topology.Topology topology, GatewayConfig config, HttpServletRequest req){
+
+ String buildURI(org.apache.hadoop.gateway.topology.Topology topology, GatewayConfig config, HttpServletRequest req){
String uri = buildXForwardBaseURL(req);
-// Strip extra context
+ // Strip extra context
uri = uri.replace(req.getContextPath(), "");
-// Add the gateway path
+ // Add the gateway path
String gatewayPath;
if(config.getGatewayPath() != null){
gatewayPath = config.getGatewayPath();
@@ -170,20 +436,31 @@ public class TopologiesResource {
return uri;
}
- String buildHref(org.apache.hadoop.gateway.topology.Topology t, HttpServletRequest req) {
+ String buildHref(HttpServletRequest req) {
+ return buildHref((String)null, req);
+ }
+
+ String buildHref(String id, HttpServletRequest req) {
String href = buildXForwardBaseURL(req);
-// Make sure that the pathInfo doesn't have any '/' chars at the end.
+ // Make sure that the pathInfo doesn't have any '/' chars at the end.
String pathInfo = req.getPathInfo();
- if(pathInfo.endsWith("/")) {
- while(pathInfo.endsWith("/")) {
- pathInfo = pathInfo.substring(0, pathInfo.length() - 1);
- }
+ while(pathInfo.endsWith("/")) {
+ pathInfo = pathInfo.substring(0, pathInfo.length() - 1);
+ }
+
+ href += pathInfo;
+
+ if (id != null) {
+ href += "/" + id;
}
- href += pathInfo + "/" + t.getName();
return href;
}
+ String buildHref(org.apache.hadoop.gateway.topology.Topology t, HttpServletRequest req) {
+ return buildHref(t.getName(), req);
+ }
+
private SimpleTopology getSimpleTopology(org.apache.hadoop.gateway.topology.Topology t, GatewayConfig config) {
String uri = buildURI(t, config, request);
String href = buildHref(t, request);
@@ -200,34 +477,34 @@ public class TopologiesResource {
String baseURL = "";
-// Get Protocol
+ // Get Protocol
if(req.getHeader(X_Forwarded_Proto) != null){
baseURL += req.getHeader(X_Forwarded_Proto) + "://";
} else {
baseURL += req.getProtocol() + "://";
}
-// Handle Server/Host and Port Here
+ // Handle Server/Host and Port Here
if (req.getHeader(X_Forwarded_Host) != null && req.getHeader(X_Forwarded_Port) != null){
-// Double check to see if host has port
+ // Double check to see if host has port
if(req.getHeader(X_Forwarded_Host).contains(req.getHeader(X_Forwarded_Port))){
baseURL += req.getHeader(X_Forwarded_Host);
} else {
-// If there's no port, add the host and port together;
+ // If there's no port, add the host and port together;
baseURL += req.getHeader(X_Forwarded_Host) + ":" + req.getHeader(X_Forwarded_Port);
}
} else if(req.getHeader(X_Forwarded_Server) != null && req.getHeader(X_Forwarded_Port) != null){
-// Tack on the server and port if they're available. Try host if server not available
+ // Tack on the server and port if they're available. Try host if server not available
baseURL += req.getHeader(X_Forwarded_Server) + ":" + req.getHeader(X_Forwarded_Port);
} else if(req.getHeader(X_Forwarded_Port) != null) {
-// if we at least have a port, we can use it.
+ // if we at least have a port, we can use it.
baseURL += req.getServerName() + ":" + req.getHeader(X_Forwarded_Port);
} else {
-// Resort to request members
+ // Resort to request members
baseURL += req.getServerName() + ":" + req.getLocalPort();
}
-// Handle Server context
+ // Handle Server context
if( req.getHeader(X_Forwarded_Context) != null ) {
baseURL += req.getHeader( X_Forwarded_Context );
} else {
@@ -237,6 +514,64 @@ public class TopologiesResource {
return baseURL;
}
+
+ static class HrefListing {
+ @JsonProperty
+ String href;
+
+ @JsonProperty
+ List<HrefListItem> items;
+
+ HrefListing() {}
+
+ public void setHref(String href) {
+ this.href = href;
+ }
+
+ public String getHref() {
+ return href;
+ }
+
+ public void setItems(List<HrefListItem> items) {
+ this.items = items;
+ }
+
+ public List<HrefListItem> getItems() {
+ return items;
+ }
+ }
+
+ static class HrefListItem {
+ @JsonProperty
+ String href;
+
+ @JsonProperty
+ String name;
+
+ HrefListItem() {}
+
+ HrefListItem(String href, String name) {
+ this.href = href;
+ this.name = name;
+ }
+
+ public void setHref(String href) {
+ this.href = href;
+ }
+
+ public String getHref() {
+ return href;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+ public String getName() {
+ return name;
+ }
+ }
+
+
@XmlAccessorType(XmlAccessType.NONE)
public static class SimpleTopology {
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-spi/src/main/java/org/apache/hadoop/gateway/i18n/GatewaySpiMessages.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/i18n/GatewaySpiMessages.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/i18n/GatewaySpiMessages.java
index 45fcb54..aad4d8a 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/i18n/GatewaySpiMessages.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/i18n/GatewaySpiMessages.java
@@ -79,7 +79,13 @@ public interface GatewaySpiMessages {
@Message( level = MessageLevel.ERROR, text = "Gateway has failed to start. Unable to prompt user for master secret setup. Please consider using knoxcli.sh create-master" )
void unableToPromptForMasterUseKnoxCLI();
- @Message( level = MessageLevel.ERROR, text = "Error in generating certificate: {0}" )
- void failedToGenerateCertificate( @StackTrace( level = MessageLevel.ERROR ) Exception e );
+ @Message( level = MessageLevel.ERROR, text = "Error in generating certificate: {0}" )
+ void failedToGenerateCertificate( @StackTrace( level = MessageLevel.ERROR ) Exception e );
+
+ @Message(level = MessageLevel.ERROR, text = "Failed to read configuration: {0}")
+ void failedToReadConfigurationFile(final String filePath, @StackTrace(level = MessageLevel.DEBUG) Exception e );
+
+ @Message(level = MessageLevel.ERROR, text = "Invalid resource URI {0} : {1}")
+ void invalidResourceURI(final String uri, final String reason, @StackTrace(level = MessageLevel.DEBUG) Exception e );
}
http://git-wip-us.apache.org/repos/asf/knox/blob/9ad9bcdb/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/topology/TopologyService.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/topology/TopologyService.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/topology/TopologyService.java
index a964f38..017b3ec 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/topology/TopologyService.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/services/topology/TopologyService.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.gateway.services.Service;
import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.topology.TopologyListener;
+import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -29,22 +30,34 @@ import java.util.Map;
public interface TopologyService extends Service {
- public void reloadTopologies();
+ void reloadTopologies();
- public void deployTopology(Topology t);
+ void deployTopology(Topology t);
- public void redeployTopologies(String topologyName);
+ void redeployTopologies(String topologyName);
- public void addTopologyChangeListener(TopologyListener listener);
+ void addTopologyChangeListener(TopologyListener listener);
- public void startMonitor() throws Exception;
+ void startMonitor() throws Exception;
- public void stopMonitor() throws Exception;
+ void stopMonitor() throws Exception;
- public Collection<Topology> getTopologies();
+ Collection<Topology> getTopologies();
- public void deleteTopology(Topology t);
+ boolean deployProviderConfiguration(String name, String content);
- public Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config);
+ Collection<File> getProviderConfigurations();
- }
+ boolean deployDescriptor(String name, String content);
+
+ Collection<File> getDescriptors();
+
+ void deleteTopology(Topology t);
+
+ boolean deleteDescriptor(String name);
+
+ boolean deleteProviderConfiguration(String name);
+
+ Map<String, List<String>> getServiceTestURLs(Topology t, GatewayConfig config);
+
+}
[23/25] knox git commit: KNOX-998 - Some more refactoring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletRequest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletRequest.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletRequest.java
new file mode 100644
index 0000000..b43465f
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletRequest.java
@@ -0,0 +1,410 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import javax.servlet.AsyncContext;
+import javax.servlet.DispatcherType;
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletInputStream;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+import javax.servlet.http.HttpUpgradeHandler;
+import javax.servlet.http.Part;
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.security.Principal;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.Locale;
+import java.util.Map;
+
+public class MockHttpServletRequest implements HttpServletRequest {
+
+ private String queryString;
+ private String contentType;
+ private String characterEncoding;
+ private ServletInputStream inputStream;
+ private String method = "GET";
+
+ @Override
+ public String getAuthType() {
+ return null;
+ }
+
+ @Override
+ public Cookie[] getCookies() {
+ return new Cookie[ 0 ];
+ }
+
+ @Override
+ public long getDateHeader( String s ) {
+ return 0;
+ }
+
+ @Override
+ public String getHeader( String s ) {
+ return null;
+ }
+
+ @Override
+ public Enumeration<String> getHeaders( String s ) {
+ return null;
+ }
+
+ @Override
+ public Enumeration<String> getHeaderNames() {
+ return null;
+ }
+
+ @Override
+ public int getIntHeader( String s ) {
+ return 0;
+ }
+
+ @Override
+ public String getMethod() {
+ return method;
+ }
+
+ public void setMethod( String method ) {
+ this.method = method;
+ }
+
+ @Override
+ public String getPathInfo() {
+ return null;
+ }
+
+ @Override
+ public String getPathTranslated() {
+ return null;
+ }
+
+ @Override
+ public String getContextPath() {
+ return null;
+ }
+
+ @Override
+ public String getQueryString() {
+ return queryString;
+ }
+
+ public void setQueryString( String queryString ) {
+ this.queryString = queryString;
+ }
+
+ @Override
+ public String getRemoteUser() {
+ return null;
+ }
+
+ @Override
+ public boolean isUserInRole( String s ) {
+ return false;
+ }
+
+ @Override
+ public Principal getUserPrincipal() {
+ return null;
+ }
+
+ @Override
+ public String getRequestedSessionId() {
+ return null;
+ }
+
+ @Override
+ public String getRequestURI() {
+ return null;
+ }
+
+ @Override
+ public StringBuffer getRequestURL() {
+ return null;
+ }
+
+ @Override
+ public String getServletPath() {
+ return null;
+ }
+
+ @Override
+ public HttpSession getSession( boolean b ) {
+ return null;
+ }
+
+ @Override
+ public HttpSession getSession() {
+ return null;
+ }
+
+ @Override
+ public String changeSessionId() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean isRequestedSessionIdValid() {
+ return false;
+ }
+
+ @Override
+ public boolean isRequestedSessionIdFromCookie() {
+ return false;
+ }
+
+ @Override
+ public boolean isRequestedSessionIdFromURL() {
+ return false;
+ }
+
+ @Override
+ @SuppressWarnings("deprecation")
+ public boolean isRequestedSessionIdFromUrl() {
+ return false;
+ }
+
+ @Override
+ public boolean authenticate( HttpServletResponse httpServletResponse ) throws IOException, ServletException {
+ return false;
+ }
+
+ @Override
+ public void login( String s, String s1 ) throws ServletException {
+ }
+
+ @Override
+ public void logout() throws ServletException {
+ }
+
+ @Override
+ public Collection<Part> getParts() throws IOException, ServletException {
+ return null;
+ }
+
+ @Override
+ public Part getPart( String s ) throws IOException, ServletException {
+ return null;
+ }
+
+ @Override
+ public <T extends HttpUpgradeHandler> T upgrade( Class<T> aClass ) throws IOException, ServletException {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Object getAttribute( String s ) {
+ return null;
+ }
+
+ @Override
+ public Enumeration<String> getAttributeNames() {
+ return null;
+ }
+
+ @Override
+ public String getCharacterEncoding() {
+ return characterEncoding;
+ }
+
+ @Override
+ public void setCharacterEncoding( String characterEncoding ) throws UnsupportedEncodingException {
+ this.characterEncoding = characterEncoding;
+ }
+
+ @Override
+ public int getContentLength() {
+ return 0;
+ }
+
+ @Override
+ public long getContentLengthLong() {
+ return 0;
+ }
+
+ @Override
+ public String getContentType() {
+ return contentType;
+ }
+
+ public void setContentType( String contentType ) {
+ this.contentType = contentType;
+ }
+
+ @Override
+ public ServletInputStream getInputStream() throws IOException {
+ return inputStream;
+ }
+
+ public void setInputStream( ServletInputStream intputStream ) {
+ this.inputStream = intputStream;
+ }
+
+ @Override
+ public String getParameter( String s ) {
+ return null;
+ }
+
+ @Override
+ public Enumeration<String> getParameterNames() {
+ return null;
+ }
+
+ @Override
+ public String[] getParameterValues( String s ) {
+ return new String[ 0 ];
+ }
+
+ @Override
+ public Map<String, String[]> getParameterMap() {
+ return null;
+ }
+
+ @Override
+ public String getProtocol() {
+ return null;
+ }
+
+ @Override
+ public String getScheme() {
+ return null;
+ }
+
+ @Override
+ public String getServerName() {
+ return null;
+ }
+
+ @Override
+ public int getServerPort() {
+ return 0;
+ }
+
+ @Override
+ public BufferedReader getReader() throws IOException {
+ return null;
+ }
+
+ @Override
+ public String getRemoteAddr() {
+ return null;
+ }
+
+ @Override
+ public String getRemoteHost() {
+ return null;
+ }
+
+ @Override
+ public void setAttribute( String s, Object o ) {
+ }
+
+ @Override
+ public void removeAttribute( String s ) {
+ }
+
+ @Override
+ public Locale getLocale() {
+ return null;
+ }
+
+ @Override
+ public Enumeration<Locale> getLocales() {
+ return null;
+ }
+
+ @Override
+ public boolean isSecure() {
+ return false;
+ }
+
+ @Override
+ public RequestDispatcher getRequestDispatcher( String s ) {
+ return null;
+ }
+
+ @Override
+ @SuppressWarnings("deprecation")
+ public String getRealPath( String s ) {
+ return null;
+ }
+
+ @Override
+ public int getRemotePort() {
+ return 0;
+ }
+
+ @Override
+ public String getLocalName() {
+ return null;
+ }
+
+ @Override
+ public String getLocalAddr() {
+ return null;
+ }
+
+ @Override
+ public int getLocalPort() {
+ return 0;
+ }
+
+ @Override
+ public ServletContext getServletContext() {
+ return null;
+ }
+
+ @Override
+ public AsyncContext startAsync() throws IllegalStateException {
+ return null;
+ }
+
+ @Override
+ public AsyncContext startAsync( ServletRequest servletRequest, ServletResponse servletResponse ) throws IllegalStateException {
+ return null;
+ }
+
+ @Override
+ public boolean isAsyncStarted() {
+ return false;
+ }
+
+ @Override
+ public boolean isAsyncSupported() {
+ return false;
+ }
+
+ @Override
+ public AsyncContext getAsyncContext() {
+ return null;
+ }
+
+ @Override
+ public DispatcherType getDispatcherType() {
+ return null;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletResponse.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletResponse.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletResponse.java
new file mode 100644
index 0000000..69f69b3
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockHttpServletResponse.java
@@ -0,0 +1,195 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Collection;
+import java.util.Locale;
+
/**
 * No-op {@link HttpServletResponse} stub for tests. Every mutator silently
 * discards its argument and every accessor returns a fixed default (null, 0
 * or false); nothing is recorded. Extend and override individual methods when
 * a test needs to capture response state.
 */
public class MockHttpServletResponse implements HttpServletResponse {

  @Override
  public void addCookie( Cookie cookie ) {
  }

  @Override
  public boolean containsHeader( String s ) {
    return false;
  }

  @Override
  public String encodeURL( String s ) {
    return null;
  }

  @Override
  public String encodeRedirectURL( String s ) {
    return null;
  }

  @Override
  @SuppressWarnings("deprecation")
  public String encodeUrl( String s ) {
    return null;
  }

  @Override
  public String encodeRedirectUrl( String s ) {
    return null;
  }

  @Override
  public void sendError( int i, String s ) throws IOException {
  }

  @Override
  public void sendError( int i ) throws IOException {
  }

  @Override
  public void sendRedirect( String s ) throws IOException {
  }

  @Override
  public void setDateHeader( String s, long l ) {
  }

  @Override
  public void addDateHeader( String s, long l ) {
  }

  @Override
  public void setHeader( String s, String s1 ) {
  }

  @Override
  public void addHeader( String s, String s1 ) {
  }

  @Override
  public void setIntHeader( String s, int i ) {
  }

  @Override
  public void addIntHeader( String s, int i ) {
  }

  @Override
  public void setStatus( int i ) {
  }

  @Override
  @SuppressWarnings("deprecation")
  public void setStatus( int i, String s ) {
  }

  @Override
  public int getStatus() {
    return 0;
  }

  @Override
  public String getHeader( String s ) {
    return null;
  }

  @Override
  public Collection<String> getHeaders( String s ) {
    return null;
  }

  @Override
  public Collection<String> getHeaderNames() {
    return null;
  }

  @Override
  public String getCharacterEncoding() {
    return null;
  }

  @Override
  public String getContentType() {
    return null;
  }

  @Override
  public ServletOutputStream getOutputStream() throws IOException {
    return null;
  }

  @Override
  public PrintWriter getWriter() throws IOException {
    return null;
  }

  @Override
  public void setCharacterEncoding( String s ) {
  }

  @Override
  public void setContentLength( int i ) {
  }

  @Override
  public void setContentLengthLong( long l ) {
  }

  @Override
  public void setContentType( String s ) {
  }

  @Override
  public void setBufferSize( int i ) {
  }

  @Override
  public int getBufferSize() {
    return 0;
  }

  @Override
  public void flushBuffer() throws IOException {
  }

  @Override
  public void resetBuffer() {
  }

  @Override
  public boolean isCommitted() {
    return false;
  }

  @Override
  public void reset() {
  }

  @Override
  public void setLocale( Locale locale ) {
  }

  @Override
  public Locale getLocale() {
    return null;
  }
}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockInteraction.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockInteraction.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockInteraction.java
new file mode 100644
index 0000000..b326ec4
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockInteraction.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+public class MockInteraction {
+
+ private MockResponseProvider response = new MockResponseProvider();
+ private MockRequestMatcher request = new MockRequestMatcher( response );
+
+ public MockRequestMatcher expect() {
+ return request;
+ }
+
+ public MockResponseProvider respond() {
+ return response;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockRequestMatcher.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockRequestMatcher.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockRequestMatcher.java
new file mode 100644
index 0000000..fc0a105
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockRequestMatcher.java
@@ -0,0 +1,330 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.http.NameValuePair;
+import org.apache.http.client.utils.URLEncodedUtils;
+import org.apache.http.message.BasicNameValuePair;
+import org.hamcrest.Matcher;
+import org.hamcrest.Matchers;
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalToIgnoringCase;
+import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
+import static org.xmlmatchers.transform.XmlConverters.the;
+import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
+
+public class MockRequestMatcher {
+
+ private static final Charset UTF8 = Charset.forName( "UTF-8" );
+
+ private String from;
+ private MockResponseProvider response;
+ private Set<String> methods = null;
+ private String pathInfo = null;
+ private String requestURL = null;
+ Map<String,Matcher> headers = null;
+ Set<Cookie> cookies = null;
+ private Map<String,Object> attributes = null;
+ private Map<String,String> queryParams = null;
+ private String contentType = null;
+ private String characterEncoding = null;
+ private Integer contentLength = null;
+ private byte[] entity = null;
+ private Map<String,String[]> formParams = null;
+
+ public MockRequestMatcher( MockResponseProvider response ) {
+ this.response = response;
+ }
+
+ public MockResponseProvider respond() {
+ return response;
+ }
+
+ public MockRequestMatcher from( String from ) {
+ this.from = from;
+ return this;
+ }
+
+ public MockRequestMatcher method( String... methods ) {
+ if( this.methods == null ) {
+ this.methods = new HashSet<>();
+ }
+ if( methods != null ) {
+ for( String method: methods ) {
+ this.methods.add( method );
+ }
+ }
+ return this;
+ }
+
+ public MockRequestMatcher pathInfo( String pathInfo ) {
+ this.pathInfo = pathInfo;
+ return this;
+ }
+
+ public MockRequestMatcher requestUrl( String requestUrl ) {
+ this.requestURL = requestUrl;
+ return this;
+ }
+
+ public MockRequestMatcher header( String name, String value ) {
+ if( headers == null ) {
+ headers = new HashMap<>();
+ }
+ headers.put( name, Matchers.is(value) );
+ return this;
+ }
+
+ public MockRequestMatcher header( String name, Matcher matcher ) {
+ if( headers == null ) {
+ headers = new HashMap<>();
+ }
+ headers.put( name, matcher );
+ return this;
+ }
+
+ public MockRequestMatcher cookie( Cookie cookie ) {
+ if( cookies == null ) {
+ cookies = new HashSet<>();
+ }
+ cookies.add( cookie );
+ return this;
+ }
+
+ public MockRequestMatcher attribute( String name, Object value ) {
+ if( this.attributes == null ) {
+ this.attributes = new HashMap<>();
+ }
+ attributes.put( name, value );
+ return this;
+ }
+
+ public MockRequestMatcher queryParam( String name, String value ) {
+ if( this.queryParams == null ) {
+ this.queryParams = new HashMap<>();
+ }
+ queryParams.put( name, value );
+ return this;
+ }
+
+ public MockRequestMatcher formParam( String name, String... values ) {
+ if( entity != null ) {
+ throw new IllegalStateException( "Entity already specified." );
+ }
+ if( formParams == null ) {
+ formParams = new HashMap<>();
+ }
+ String[] currentValues = formParams.get( name );
+ if( currentValues == null ) {
+ currentValues = values;
+ } else if ( values != null ) {
+ currentValues = ArrayUtils.addAll( currentValues, values );
+ }
+ formParams.put( name, currentValues );
+ return this;
+ }
+
+ public MockRequestMatcher content( String string, Charset charset ) {
+ content( string.getBytes( charset ) );
+ return this;
+ }
+
+ public MockRequestMatcher content( byte[] entity ) {
+ if( formParams != null ) {
+ throw new IllegalStateException( "Form params already specified." );
+ }
+ this.entity = entity;
+ return this;
+ }
+
+ public MockRequestMatcher content( URL url ) throws IOException {
+ content( url.openStream() );
+ return this;
+ }
+
+ public MockRequestMatcher content( InputStream stream ) throws IOException {
+ content( IOUtils.toByteArray( stream ) );
+ return this;
+ }
+
+ public MockRequestMatcher contentType( String contentType ) {
+ this.contentType = contentType;
+ return this;
+ }
+
+ public MockRequestMatcher contentLength( int length ) {
+ this.contentLength = length;
+ return this;
+ }
+
+ public MockRequestMatcher characterEncoding( String charset ) {
+ this.characterEncoding = charset;
+ return this;
+ }
+
+ public void match( HttpServletRequest request ) throws IOException {
+ if( methods != null ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " is not using one of the expected HTTP methods",
+ methods, hasItem( request.getMethod() ) );
+ }
+ if( pathInfo != null ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected pathInfo",
+ request.getPathInfo(), is( pathInfo ) );
+ }
+ if( requestURL != null ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected requestURL",
+ request.getRequestURL().toString(), is( requestURL ) );
+ }
+ if( headers != null ) {
+ for( Entry<String, Matcher> entry : headers.entrySet() ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected value for header " + entry.getKey(),
+ request.getHeader( entry.getKey() ), entry.getValue() );
+ }
+ }
+ if( cookies != null ) {
+ List<Cookie> requestCookies = Arrays.asList( request.getCookies() );
+ for( Cookie cookie: cookies ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected cookie " + cookie,
+ requestCookies, hasItem( cookie ) );
+ }
+ }
+ if( contentType != null ) {
+ String[] requestContentType = request.getContentType().split(";",2);
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected content type",
+ requestContentType[ 0 ], is( contentType ) );
+ }
+ if( characterEncoding != null ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected character encoding",
+ request.getCharacterEncoding(), equalToIgnoringCase( characterEncoding ) );
+ }
+ if( contentLength != null ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " does not have the expected content length",
+ request.getContentLength(), is( contentLength ) );
+ }
+ if( attributes != null ) {
+ for( String name: attributes.keySet() ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " is missing attribute '" + name + "'",
+ request.getAttribute( name ), notNullValue() );
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " has wrong value for attribute '" + name + "'",
+ request.getAttribute( name ), is( request.getAttribute( name ) ) );
+ }
+ }
+ // Note: Cannot use any of the expect.getParameter*() methods because they will read the
+ // body and we don't want that to happen.
+ if( queryParams != null ) {
+ String queryString = request.getQueryString();
+ List<NameValuePair> requestParams = parseQueryString( queryString == null ? "" : queryString );
+ for( Entry<String, String> entry : queryParams.entrySet() ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " query string " + queryString + " is missing parameter '" + entry.getKey() + "'",
+ requestParams, hasItem( new BasicNameValuePair(entry.getKey(), entry.getValue())) );
+ }
+ }
+ if( formParams != null ) {
+ String paramString = IOUtils.toString( request.getInputStream(), request.getCharacterEncoding() );
+ List<NameValuePair> requestParams = parseQueryString( paramString == null ? "" : paramString );
+ for( Entry<String, String[]> entry : formParams.entrySet() ) {
+ String[] expectedValues = entry.getValue();
+ for( String expectedValue : expectedValues ) {
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " form params " + paramString + " is missing a value " + expectedValue + " for parameter '" + entry.getKey() + "'",
+ requestParams, hasItem( new BasicNameValuePair(entry.getKey(), expectedValue ) ));
+ }
+ }
+ }
+ if( entity != null ) {
+ if( contentType != null && contentType.endsWith( "/xml" ) ) {
+ String expectEncoding = characterEncoding;
+ String expect = new String( entity, ( expectEncoding == null ? UTF8.name() : expectEncoding ) );
+ String actualEncoding = request.getCharacterEncoding();
+ String actual = IOUtils.toString( request.getInputStream(), actualEncoding == null ? UTF8.name() : actualEncoding );
+ assertThat( the( actual ), isEquivalentTo( the( expect ) ) );
+ } else if ( contentType != null && contentType.endsWith( "/json" ) ) {
+ String expectEncoding = characterEncoding;
+ String expect = new String( entity, ( expectEncoding == null ? UTF8.name() : expectEncoding ) );
+ String actualEncoding = request.getCharacterEncoding();
+ String actual = IOUtils.toString( request.getInputStream(), actualEncoding == null ? UTF8.name() : actualEncoding );
+// System.out.println( "EXPECT=" + expect );
+// System.out.println( "ACTUAL=" + actual );
+ assertThat( actual, sameJSONAs( expect ) );
+ } else if( characterEncoding == null || request.getCharacterEncoding() == null ) {
+ byte[] bytes = IOUtils.toByteArray( request.getInputStream() );
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " content does not match the expected content",
+ bytes, is( entity ) );
+ } else {
+ String expect = new String( entity, characterEncoding );
+ String actual = IOUtils.toString( request.getInputStream(), request.getCharacterEncoding() );
+ assertThat(
+ "Request " + request.getMethod() + " " + request.getRequestURL() +
+ " content does not match the expected content",
+ actual, is( expect ) );
+ }
+ }
+ }
+
+ public String toString() {
+ return "from=" + from + ", pathInfo=" + pathInfo;
+ }
+
+ private static List<NameValuePair> parseQueryString( String queryString ) {
+ return URLEncodedUtils.parse(queryString, Charset.defaultCharset());
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockResponseProvider.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockResponseProvider.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockResponseProvider.java
new file mode 100644
index 0000000..503ff65
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockResponseProvider.java
@@ -0,0 +1,157 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import org.apache.commons.io.IOUtils;
+
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+public class MockResponseProvider {
+
+ Integer errorCode = null;
+ String errorMsg = null;
+ Integer statusCode = null;
+ String redirectUrl = null;
+ Map<String,String> headers = null;
+ Set<Cookie> cookies = null;
+ byte[] entity = null;
+ String contentType = null;
+ String characterEncoding = null;
+ Integer contentLength = null;
+
+ public MockResponseProvider status( int statusCode ) {
+ this.statusCode = statusCode;
+ return this;
+ }
+
+ public MockResponseProvider error( int code, String message ) {
+ errorCode = code;
+ errorMsg = message;
+ return this;
+ }
+
+ public MockResponseProvider redirect( String location ) {
+ redirectUrl = location;
+ return this;
+ }
+
+ public MockResponseProvider header( String name, String value ) {
+ if( headers == null ) {
+ headers = new HashMap<>();
+ }
+ headers.put( name, value );
+ return this;
+ }
+
+ public MockResponseProvider cookie( Cookie cookie ) {
+ if( cookies == null ) {
+ cookies = new HashSet<>();
+ }
+ cookies.add( cookie );
+ return this;
+ }
+
+ public MockResponseProvider content( byte[] entity ) {
+ this.entity = entity;
+ return this;
+ }
+
+ public MockResponseProvider content( String string, Charset charset ) {
+ this.entity = string.getBytes( charset );
+ return this;
+ }
+
+ public MockResponseProvider content( URL url ) throws IOException {
+ content( url.openStream() );
+ return this;
+ }
+
+ public MockResponseProvider content( InputStream stream ) throws IOException {
+ content( IOUtils.toByteArray( stream ) );
+ return this;
+ }
+
+ public MockResponseProvider contentType( String contentType ) {
+ this.contentType = contentType;
+ return this;
+ }
+
+ public MockResponseProvider contentLength( int contentLength ) {
+ this.contentLength = contentLength;
+ return this;
+ }
+
+ public MockResponseProvider characterEncoding( String charset ) {
+ this.characterEncoding = charset;
+ return this;
+ }
+
+ public void apply( HttpServletResponse response ) throws IOException {
+ if( statusCode != null ) {
+ response.setStatus( statusCode );
+ } else {
+ response.setStatus( HttpServletResponse.SC_OK );
+ }
+ if( errorCode != null ) {
+ if( errorMsg != null ) {
+ response.sendError( errorCode, errorMsg );
+ } else {
+ response.sendError( errorCode );
+ }
+ }
+ if( redirectUrl != null ) {
+ response.sendRedirect( redirectUrl );
+ }
+ if( headers != null ) {
+ for( Entry<String, String> entry : headers.entrySet() ) {
+ response.addHeader( entry.getKey(), entry.getValue() );
+ }
+ }
+ if( cookies != null ) {
+ for( Cookie cookie: cookies ) {
+ response.addCookie( cookie );
+ }
+ }
+ if( contentType != null ) {
+ response.setContentType( contentType );
+ }
+ if( characterEncoding != null ) {
+ response.setCharacterEncoding( characterEncoding );
+ }
+ if( contentLength != null ) {
+ response.setContentLength( contentLength );
+ }
+ response.flushBuffer();
+ if( entity != null ) {
+ response.getOutputStream().write( entity );
+ //KNOX-685: response.getOutputStream().flush();
+ response.getOutputStream().close();
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServer.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServer.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServer.java
new file mode 100644
index 0000000..09905cd
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServer.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.Servlet;
+import java.util.LinkedList;
+import java.util.Queue;
+
+/**
+ * An embedded Jetty server with a single servlet deployed on "/*".
+ * It is used by populating a queue of "interactions".
+ * Each interaction is an expected request and a resulting response.
+ * These interactions are added to a queue in a fluent API style.
+ * So in most of the tests like GatewayBasicFuncTest.testBasicJsonUseCase you will see calls like
+ * driver.getMock( "WEBHDFS" ).expect()....respond()...;
+ * This adds a single interaction to the mock server which is returned via the driver.getMock( "WEBHDFS" ) above.
+ * Any number of interactions may be added.
+ * When the request comes in it will check the request against the expected request.
+ * If it matches return the response otherwise it will return a 500 error.
+ * Typically at the end of a test you should check to make sure the interaction queue is consumed by calling isEmpty().
+ * The reset() method can be used to ensure everything is cleaned up so that the mock server can be reused between tests.
+ * The whole idea was modeled after the REST testing framework REST-assured and aims to be a server-side equivalent.
+ */
+public class MockServer {
+
+ private Logger log = LoggerFactory.getLogger( this.getClass() );
+
+ private String name;
+ private Server jetty;
+
+ private Queue<MockInteraction> interactions = new LinkedList<MockInteraction>();
+
+ public MockServer( String name ) {
+ this.name = name;
+ }
+
+ public MockServer( String name, boolean start ) throws Exception {
+ this.name = name;
+ if( start ) {
+ start();
+ }
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void start() throws Exception {
+ Handler context = createHandler();
+ jetty = new Server(0);
+ jetty.setHandler( context );
+ jetty.start();
+ log.info( "Mock server started on port " + getPort() );
+ }
+
+ public void stop() throws Exception {
+ jetty.stop();
+ jetty.join();
+ }
+
+ private ServletContextHandler createHandler() {
+ Servlet servlet = new MockServlet( getName(), interactions );
+ ServletHolder holder = new ServletHolder( servlet );
+ ServletContextHandler context = new ServletContextHandler( ServletContextHandler.SESSIONS );
+ context.setContextPath( "/" );
+ context.addServlet( holder, "/*" );
+ return context;
+ }
+
+ public int getPort() {
+ return jetty.getURI().getPort();
+ }
+
+ public MockRequestMatcher expect() {
+ MockInteraction interaction = new MockInteraction();
+ interactions.add( interaction );
+ return interaction.expect();
+ }
+
+ public MockResponseProvider respond() {
+ MockInteraction interaction = new MockInteraction();
+ interactions.add( interaction );
+ return interaction.respond();
+ }
+
+ public int getCount() {
+ return interactions.size();
+ }
+
+ public boolean isEmpty() {
+ return interactions.isEmpty();
+ }
+
+ public void reset() {
+ interactions.clear();
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServlet.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServlet.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServlet.java
new file mode 100644
index 0000000..2c82dfd
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServlet.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+import java.util.Queue;
+
+import org.apache.log4j.Logger;
+
+import static org.junit.Assert.fail;
+
+public class MockServlet extends HttpServlet {
+
+ private static final Logger LOG = Logger.getLogger(MockServlet.class.getName());
+
+ public String name;
+ public Queue<MockInteraction> interactions;
+
+ public MockServlet( String name, Queue<MockInteraction> interactions ) {
+ this.name = name;
+ this.interactions = interactions;
+ }
+
+ @Override
+ protected void service( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {
+ LOG.debug( "service: request=" + request.getMethod() + " " + request.getRequestURL() + "?" + request.getQueryString() );
+ try {
+ if( interactions.isEmpty() ) {
+ fail( "Mock servlet " + name + " received a request but the expected interaction queue is empty." );
+ }
+ MockInteraction interaction = interactions.remove();
+ interaction.expect().match( request );
+ interaction.respond().apply( response );
+ LOG.debug( "service: response=" + response.getStatus() );
+ } catch( AssertionError e ) {
+ LOG.debug( "service: exception=" + e.getMessage() );
+ e.printStackTrace(); // I18N not required.
+ throw new ServletException( e );
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletContext.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletContext.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletContext.java
new file mode 100644
index 0000000..4181067
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletContext.java
@@ -0,0 +1,293 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterRegistration;
+import javax.servlet.RequestDispatcher;
+import javax.servlet.Servlet;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRegistration;
+import javax.servlet.SessionCookieConfig;
+import javax.servlet.SessionTrackingMode;
+import javax.servlet.descriptor.JspConfigDescriptor;
+import java.io.InputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Enumeration;
+import java.util.EventListener;
+import java.util.Map;
+import java.util.Set;
+
/**
 * A do-nothing {@link ServletContext} stub for tests. Every query method
 * returns {@code null}/{@code 0}/{@code false}, and every mutator is a no-op,
 * except {@link #getVirtualServerName()} which throws. Use it where a context
 * is required by an API under test but its behavior is irrelevant.
 */
public class MockServletContext implements ServletContext {

  @Override
  public String getContextPath() {
    return null;
  }

  @Override
  public ServletContext getContext( String s ) {
    return null;
  }

  @Override
  public int getMajorVersion() {
    return 0;
  }

  @Override
  public int getMinorVersion() {
    return 0;
  }

  @Override
  public int getEffectiveMajorVersion() {
    return 0;
  }

  @Override
  public int getEffectiveMinorVersion() {
    return 0;
  }

  @Override
  public String getMimeType( String s ) {
    return null;
  }

  @Override
  public Set<String> getResourcePaths( String s ) {
    return null;
  }

  @Override
  public URL getResource( String s ) throws MalformedURLException {
    return null;
  }

  @Override
  public InputStream getResourceAsStream( String s ) {
    return null;
  }

  @Override
  public RequestDispatcher getRequestDispatcher( String s ) {
    return null;
  }

  @Override
  public RequestDispatcher getNamedDispatcher( String s ) {
    return null;
  }

  // The three methods below are deprecated in the Servlet API but must still
  // be implemented; they are stubbed like everything else.
  @Override
  @SuppressWarnings("deprecation")
  public Servlet getServlet( String s ) throws ServletException {
    return null;
  }

  @Override
  @SuppressWarnings("deprecation")
  public Enumeration<Servlet> getServlets() {
    return null;
  }

  @Override
  @SuppressWarnings("deprecation")
  public Enumeration<String> getServletNames() {
    return null;
  }

  // Logging variants are intentionally silent.
  @Override
  public void log( String s ) {
  }

  @Override
  @SuppressWarnings("deprecation")
  public void log( Exception e, String s ) {
  }

  @Override
  public void log( String s, Throwable throwable ) {
  }

  @Override
  public String getRealPath( String s ) {
    return null;
  }

  @Override
  public String getServerInfo() {
    return null;
  }

  @Override
  public String getInitParameter( String s ) {
    return null;
  }

  @Override
  public Enumeration<String> getInitParameterNames() {
    return null;
  }

  @Override
  public boolean setInitParameter( String s, String s1 ) {
    return false;
  }

  // Attributes are neither stored nor returned.
  @Override
  public Object getAttribute( String s ) {
    return null;
  }

  @Override
  public Enumeration<String> getAttributeNames() {
    return null;
  }

  @Override
  public void setAttribute( String s, Object o ) {
  }

  @Override
  public void removeAttribute( String s ) {
  }

  @Override
  public String getServletContextName() {
    return null;
  }

  // Dynamic servlet/filter registration is unsupported; all forms return null.
  @Override
  public ServletRegistration.Dynamic addServlet( String s, String s1 ) {
    return null;
  }

  @Override
  public ServletRegistration.Dynamic addServlet( String s, Servlet servlet ) {
    return null;
  }

  @Override
  public ServletRegistration.Dynamic addServlet( String s, Class<? extends Servlet> aClass ) {
    return null;
  }

  @Override
  public <T extends Servlet> T createServlet( Class<T> tClass ) throws ServletException {
    return null;
  }

  @Override
  public ServletRegistration getServletRegistration( String s ) {
    return null;
  }

  @Override
  public Map<String, ? extends ServletRegistration> getServletRegistrations() {
    return null;
  }

  @Override
  public FilterRegistration.Dynamic addFilter( String s, String s1 ) {
    return null;
  }

  @Override
  public FilterRegistration.Dynamic addFilter( String s, Filter filter ) {
    return null;
  }

  @Override
  public FilterRegistration.Dynamic addFilter( String s, Class<? extends Filter> aClass ) {
    return null;
  }

  @Override
  public <T extends Filter> T createFilter( Class<T> tClass ) throws ServletException {
    return null;
  }

  @Override
  public FilterRegistration getFilterRegistration( String s ) {
    return null;
  }

  @Override
  public Map<String, ? extends FilterRegistration> getFilterRegistrations() {
    return null;
  }

  @Override
  public SessionCookieConfig getSessionCookieConfig() {
    return null;
  }

  @Override
  public void setSessionTrackingModes( Set<SessionTrackingMode> sessionTrackingModes ) {
  }

  @Override
  public Set<SessionTrackingMode> getDefaultSessionTrackingModes() {
    return null;
  }

  @Override
  public Set<SessionTrackingMode> getEffectiveSessionTrackingModes() {
    return null;
  }

  @Override
  public void addListener( String s ) {
  }

  @Override
  public <T extends EventListener> void addListener( T t ) {
  }

  @Override
  public void addListener( Class<? extends EventListener> aClass ) {
  }

  @Override
  public <T extends EventListener> T createListener( Class<T> tClass ) throws ServletException {
    return null;
  }

  @Override
  public JspConfigDescriptor getJspConfigDescriptor() {
    return null;
  }

  @Override
  public ClassLoader getClassLoader() {
    return null;
  }

  @Override
  public void declareRoles( String... strings ) {
  }

  // The only non-null, non-silent member: deliberately unsupported.
  @Override
  public String getVirtualServerName() {
    throw new UnsupportedOperationException();
  }
}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletInputStream.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletInputStream.java b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletInputStream.java
new file mode 100644
index 0000000..82eda72
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/knox/test/mock/MockServletInputStream.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.test.mock;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.servlet.ReadListener;
+import javax.servlet.ServletInputStream;
+
+public class MockServletInputStream extends ServletInputStream {
+
+ private InputStream stream;
+
+ public MockServletInputStream( InputStream stream ) {
+ this.stream = stream;
+ }
+
+ @Override
+ public int read() throws IOException {
+ return stream.read();
+ }
+
+ @Override
+ public boolean isFinished() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public boolean isReady() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void setReadListener( ReadListener readListener ) {
+ throw new UnsupportedOperationException();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
index 79837e8..9478574 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
@@ -23,8 +23,8 @@ import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
@@ -48,8 +48,8 @@ import java.util.Properties;
import java.util.UUID;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.notNullValue;
import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
index eba5de6..9778169 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
@@ -22,7 +22,7 @@ import com.mycila.xmltool.XMLTag;
import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
index 7dcb4e0..e6f7b80 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
@@ -34,9 +34,7 @@ import javax.ws.rs.core.MediaType;
import io.restassured.http.ContentType;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
@@ -49,7 +47,7 @@ import org.apache.knox.gateway.util.XmlUtils;
import io.restassured.response.ResponseBody;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -63,8 +61,8 @@ import org.xml.sax.InputSource;
import static io.restassured.RestAssured.given;
import static junit.framework.TestCase.assertTrue;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
index a282cfe..84bed16 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
@@ -30,14 +30,12 @@ import java.util.Properties;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -49,8 +47,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.arrayWithSize;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
index 3adf41a..02be270 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
@@ -47,10 +47,10 @@ import com.mycila.xmltool.XMLTag;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.knox.gateway.util.KnoxCLI;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.MediumTests;
-import org.apache.hadoop.test.category.VerifyTest;
-import org.apache.hadoop.test.mock.MockRequestMatcher;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.MediumTests;
+import org.apache.knox.test.category.VerifyTest;
+import org.apache.knox.test.mock.MockRequestMatcher;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
@@ -87,8 +87,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.greaterThan;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
index 9349dca..137cd47 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
@@ -21,13 +21,11 @@ import io.restassured.response.Response;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
import org.apache.commons.io.FileUtils;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -51,8 +49,8 @@ import java.util.UUID;
import java.util.regex.Pattern;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
index c7ac9ee..3c71248 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
@@ -25,7 +25,7 @@ import org.apache.knox.gateway.config.GatewayConfig;
import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
index 3a3d776..74b8a21 100755
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
@@ -18,8 +18,8 @@
package org.apache.knox.gateway;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
@@ -39,7 +39,7 @@ import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
index 37ee90c..ba044b4 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
@@ -18,15 +18,14 @@
package org.apache.knox.gateway;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
@@ -39,7 +38,7 @@ import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
index b623f06..2654db1 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
@@ -19,15 +19,13 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -43,7 +41,6 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.net.InetSocketAddress;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
@@ -51,8 +48,8 @@ import java.util.Map;
import java.util.UUID;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
index 442a767..d73d200 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
@@ -20,13 +20,11 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
import org.apache.commons.io.FileUtils;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -44,8 +42,8 @@ import java.util.Map;
import java.util.UUID;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
index 6dc469c..e14f44a 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
@@ -29,14 +29,13 @@ import java.util.UUID;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpHost;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
@@ -61,8 +60,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.endsWith;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
index a4d8166..3c0429f 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
@@ -20,9 +20,9 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.junit.After;
import org.junit.Before;
@@ -36,8 +36,8 @@ import java.net.ConnectException;
import java.util.concurrent.ConcurrentHashMap;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
/**
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
index bc01c86..dcaa353 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
@@ -18,9 +18,9 @@ package org.apache.knox.gateway;
* limitations under the License.
*/
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -31,8 +31,8 @@ import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
/**
* Test the fail cases for the Port Mapping Feature
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
index cbf138b..18e1487 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
@@ -20,9 +20,9 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -33,8 +33,8 @@ import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
/**
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
index b146972..757ade7 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
@@ -19,12 +19,10 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.knox.gateway.config.GatewayConfig;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
@@ -43,8 +41,8 @@ import java.util.Map;
import java.util.UUID;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
index 3726dbc..4e85542 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
@@ -18,8 +18,6 @@
package org.apache.knox.gateway;
import java.io.File;
-import java.nio.file.FileSystems;
-import java.nio.file.Path;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
@@ -42,15 +40,13 @@ import javax.net.ssl.X509TrustManager;
import javax.xml.transform.stream.StreamSource;
import org.apache.commons.io.FileUtils;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.topology.TopologyService;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
@@ -75,8 +71,8 @@ import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
index cd30311..024919b 100755
--- a/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
@@ -18,16 +18,14 @@
package org.apache.knox.gateway;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.io.InputStream;
-import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.FileSystems;
import java.nio.file.Path;
@@ -41,7 +39,7 @@ import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.services.security.AliasService;
-import org.apache.hadoop.test.TestUtils;
+import org.apache.knox.test.TestUtils;
import org.apache.http.HttpStatus;
import org.apache.log4j.Appender;
import org.hamcrest.MatcherAssert;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestNegative.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestNegative.java b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestNegative.java
index fc2f601..e59f3a0 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestNegative.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestNegative.java
@@ -22,8 +22,8 @@ import com.mycila.xmltool.XMLTag;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.util.KnoxCLI;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.log4j.Appender;
import org.junit.BeforeClass;
import org.junit.AfterClass;
@@ -38,8 +38,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestPositive.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestPositive.java b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestPositive.java
index f612a4e..12a7c15 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestPositive.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliLdapFuncTestPositive.java
@@ -19,13 +19,11 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.util.KnoxCLI;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.log4j.Appender;
import org.junit.BeforeClass;
import org.junit.AfterClass;
@@ -40,8 +38,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliSysBindTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliSysBindTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliSysBindTest.java
index 73336c7..d8b6496 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliSysBindTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/KnoxCliSysBindTest.java
@@ -19,13 +19,11 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
import org.apache.knox.gateway.services.DefaultGatewayServices;
import org.apache.knox.gateway.services.ServiceLifecycleException;
import org.apache.knox.gateway.util.KnoxCLI;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.log.NoOpAppender;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.log.NoOpAppender;
import org.apache.log4j.Appender;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -40,8 +38,8 @@ import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertThat;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/OozieServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/OozieServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/OozieServiceDefinitionTest.java
index b0f23e7..491b6bb 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/OozieServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/OozieServiceDefinitionTest.java
@@ -28,8 +28,8 @@ import org.apache.knox.gateway.filter.rewrite.impl.UrlRewriteRequest;
import org.apache.knox.gateway.services.GatewayServices;
import org.apache.knox.gateway.services.registry.ServiceRegistry;
import org.apache.knox.gateway.util.XmlUtils;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.mock.MockServletInputStream;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.mock.MockServletInputStream;
import org.easymock.EasyMock;
import org.junit.Test;
import org.w3c.dom.Document;
@@ -40,8 +40,8 @@ import javax.servlet.http.HttpServletRequest;
import java.io.InputStream;
import java.io.Reader;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.xml.HasXPath.hasXPath;
http://git-wip-us.apache.org/repos/asf/knox/blob/1451428f/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
index 98739a1..f4f77e2 100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
@@ -19,9 +19,9 @@ package org.apache.knox.gateway;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
-import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.ReleaseTest;
-import org.apache.hadoop.test.mock.MockServer;
+import org.apache.knox.test.TestUtils;
+import org.apache.knox.test.category.ReleaseTest;
+import org.apache.knox.test.mock.MockServer;
import org.apache.http.HttpStatus;
import org.junit.After;
import org.junit.Before;
@@ -32,8 +32,8 @@ import org.junit.experimental.categories.Category;
import java.io.IOException;
import static io.restassured.RestAssured.given;
-import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
-import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.apache.knox.test.TestUtils.LOG_ENTER;
+import static org.apache.knox.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
@Category(ReleaseTest.class)
[04/25] knox git commit: KNOX-1088 - Remove LDAP BaseDirectoryService*
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapDirectoryServer.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapDirectoryServer.java b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapDirectoryServer.java
index 12fe30d..b1685a1 100644
--- a/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapDirectoryServer.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapDirectoryServer.java
@@ -25,12 +25,17 @@ import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.server.core.api.CoreSession;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.partition.Partition;
+import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
import org.apache.directory.server.core.factory.DirectoryServiceFactory;
+import org.apache.directory.server.core.factory.JdbmPartitionFactory;
+import org.apache.directory.server.core.factory.PartitionFactory;
import org.apache.directory.server.ldap.LdapServer;
import org.apache.directory.server.protocol.shared.store.LdifFileLoader;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
import org.apache.directory.server.protocol.shared.transport.Transport;
import org.apache.log4j.PropertyConfigurator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
@@ -39,6 +44,8 @@ import java.util.UUID;
public class SimpleLdapDirectoryServer {
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleLdapDirectoryServer.class);
+
private DirectoryServiceFactory factory;
private DirectoryService service;
@@ -50,7 +57,36 @@ public class SimpleLdapDirectoryServer {
throw new FileNotFoundException( usersLdif.getAbsolutePath() );
}
- factory = new SimpleDirectoryServiceFactory();
+ DirectoryService directoryService = null;
+ try {
+ // creating the instance here so that
+ // we can set some properties like access control and anonymous access
+ // before starting up the service
+ directoryService = new SimpleDirectoryService();
+
+ // no need to register a shutdown hook during tests because this
+ // starts a lot of threads and slows down test execution
+ directoryService.setShutdownHookEnabled( false );
+ } catch ( Exception e ) {
+ throw new RuntimeException( e );
+ }
+
+ PartitionFactory partitionFactory = null;
+ try {
+ String typeName = System.getProperty( "apacheds.partition.factory" );
+
+ if ( typeName != null ) {
+ Class<? extends PartitionFactory> type = ( Class<? extends PartitionFactory> ) Class.forName( typeName );
+ partitionFactory = type.newInstance();
+ } else {
+ partitionFactory = new JdbmPartitionFactory();
+ }
+ } catch ( Exception e ) {
+ LOG.error( "Error instantiating custom partition factory", e );
+ throw new RuntimeException( e );
+ }
+
+ factory = new DefaultDirectoryServiceFactory( directoryService, partitionFactory );
factory.init( UUID.randomUUID().toString() );
service = factory.getDirectoryService();
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-demo-ldap/src/test/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapServerTest.java
----------------------------------------------------------------------
diff --git a/gateway-demo-ldap/src/test/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapServerTest.java b/gateway-demo-ldap/src/test/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapServerTest.java
index 33a367e..7d73219 100644
--- a/gateway-demo-ldap/src/test/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapServerTest.java
+++ b/gateway-demo-ldap/src/test/java/org/apache/hadoop/gateway/security/ldap/SimpleLdapServerTest.java
@@ -23,14 +23,12 @@ import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.ldap.client.api.LdapConnection;
import org.apache.directory.ldap.client.api.LdapNetworkConnection;
import org.apache.directory.server.protocol.shared.transport.TcpTransport;
-import org.apache.directory.server.protocol.shared.transport.Transport;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
-import java.net.ServerSocket;
import static org.junit.Assert.fail;
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-test-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-release/pom.xml b/gateway-test-release/pom.xml
index 3d4cbc5..81ccbe1 100644
--- a/gateway-test-release/pom.xml
+++ b/gateway-test-release/pom.xml
@@ -128,17 +128,6 @@
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-core-integ</artifactId>
- <version>${apacheds-version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.directory.api</groupId>
- <artifactId>api-ldap-schema-data</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-all</artifactId>
<exclusions>
<exclusion>
http://git-wip-us.apache.org/repos/asf/knox/blob/41952dd3/gateway-test/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test/pom.xml b/gateway-test/pom.xml
index 24e894b..f8576e6 100644
--- a/gateway-test/pom.xml
+++ b/gateway-test/pom.xml
@@ -109,20 +109,6 @@
<scope>test</scope>
</dependency>
- <!--
- <dependency>
- <groupId>org.apache.directory.server</groupId>
- <artifactId>apacheds-all</artifactId>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>org.apache.directory.shared</groupId>
- <artifactId>shared-ldap-schema</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- -->
-
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity</artifactId>
[20/25] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
Merge branch 'master' into KNOX-998-Package_Restructuring
# Conflicts:
# gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
# gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
# gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
# gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
# gateway-server/src/test/java/org/apache/knox/gateway/GatewayFilterTest.java
# gateway-server/src/test/java/org/apache/knox/gateway/services/topology/DefaultTopologyServiceTest.java
# gateway-service-admin/src/main/java/org/apache/knox/gateway/service/admin/TopologiesResource.java
# gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c754cc06
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c754cc06
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c754cc06
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: c754cc06ac33c7cfff28c47ec562d888241c2641
Parents: 9577842 11ec78a
Author: Sandeep More <mo...@apache.org>
Authored: Wed Nov 1 17:10:14 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Nov 1 17:10:14 2017 -0400
----------------------------------------------------------------------
gateway-demo-ldap/pom.xml | 36 +-
.../security/ldap/BaseDirectoryService.java | 2323 ------------------
.../ldap/BaseDirectoryServiceFactory.java | 290 ---
.../security/ldap/SimpleDirectoryService.java | 6 +-
.../ldap/SimpleDirectoryServiceFactory.java | 34 -
.../ldap/SimpleLdapDirectoryServer.java | 38 +-
.../ambari/AmbariServiceDiscovery.java | 3 +-
.../filter/RegexIdentityAssertionFilter.java | 4 +-
.../regex/filter/RegexTemplate.java | 12 +-
.../regex/filter/RegexTemplateTest.java | 23 +-
.../webappsec/filter/StrictTranportFilter.java | 137 ++
.../webappsec/deploy/WebAppSecContributor.java | 11 +
.../webappsec/StrictTranportFilterTest.java | 164 ++
.../home/conf/topologies/manager.xml | 1 +
gateway-release/home/templates/sandbox-apps.xml | 1 +
.../org/apache/knox/gateway/GatewayFilter.java | 65 +-
.../apache/knox/gateway/GatewayMessages.java | 34 +-
.../gateway/config/impl/GatewayConfigImpl.java | 3 +-
.../topology/impl/DefaultTopologyService.java | 221 +-
.../builder/BeanPropertyTopologyBuilder.java | 11 +
.../xml/KnoxFormatXmlTopologyRules.java | 2 +
.../src/main/resources/conf/topology-v1.xsd | 1 +
.../apache/knox/gateway/GatewayFilterTest.java | 49 +
.../topology/DefaultTopologyServiceTest.java | 404 ++-
.../topology/file/provider-config-one.xml | 74 +
.../topology/file/simple-descriptor-five.json | 14 +
.../topology/file/simple-descriptor-six.json | 18 +
.../service/admin/HrefListingMarshaller.java | 75 +
.../service/admin/TopologiesResource.java | 393 ++-
.../service/admin/beans/BeanConverter.java | 2 +
.../gateway/service/admin/beans/Topology.java | 11 +
.../services/ambariui/2.2.1/rewrite.xml | 104 +
.../services/ambariui/2.2.1/service.xml | 92 +
.../knox/gateway/i18n/GatewaySpiMessages.java | 10 +-
.../services/topology/TopologyService.java | 33 +-
.../apache/knox/gateway/topology/Topology.java | 9 +
.../gateway/topology/topology_binding-xml.xml | 5 +-
gateway-test-release/pom.xml | 11 -
gateway-test/pom.xml | 14 -
.../gateway/GatewayAdminTopologyFuncTest.java | 586 +++++
pom.xml | 8 +-
41 files changed, 2495 insertions(+), 2837 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
----------------------------------------------------------------------
diff --cc gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
index 53add76,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryService.java
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --cc gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
index aed78bf,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/BaseDirectoryServiceFactory.java
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryService.java
----------------------------------------------------------------------
diff --cc gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryService.java
index 69cdb3c,0000000..4e843a5
mode 100644,000000..100644
--- a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryService.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryService.java
@@@ -1,29 -1,0 +1,33 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.security.ldap;
+
- public class SimpleDirectoryService extends BaseDirectoryService {
++import org.apache.directory.server.core.DefaultDirectoryService;
++
++public class SimpleDirectoryService extends DefaultDirectoryService {
+
+ public SimpleDirectoryService() throws Exception {
++ super();
+ }
+
++ @Override
+ protected void showSecurityWarnings() throws Exception {
+ // NoOp - This prevents confusing warnings from being output.
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
----------------------------------------------------------------------
diff --cc gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
index a25355b,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleDirectoryServiceFactory.java
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleLdapDirectoryServer.java
----------------------------------------------------------------------
diff --cc gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleLdapDirectoryServer.java
index 9f59e9b,0000000..4809f19
mode 100644,000000..100644
--- a/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleLdapDirectoryServer.java
+++ b/gateway-demo-ldap/src/main/java/org/apache/knox/gateway/security/ldap/SimpleLdapDirectoryServer.java
@@@ -1,124 -1,0 +1,160 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.security.ldap;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.directory.api.ldap.model.entry.DefaultModification;
+import org.apache.directory.api.ldap.model.entry.ModificationOperation;
+import org.apache.directory.api.ldap.model.exception.LdapException;
+import org.apache.directory.api.ldap.model.name.Dn;
+import org.apache.directory.server.core.api.CoreSession;
+import org.apache.directory.server.core.api.DirectoryService;
+import org.apache.directory.server.core.api.partition.Partition;
++import org.apache.directory.server.core.factory.DefaultDirectoryServiceFactory;
+import org.apache.directory.server.core.factory.DirectoryServiceFactory;
++import org.apache.directory.server.core.factory.JdbmPartitionFactory;
++import org.apache.directory.server.core.factory.PartitionFactory;
+import org.apache.directory.server.ldap.LdapServer;
+import org.apache.directory.server.protocol.shared.store.LdifFileLoader;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.directory.server.protocol.shared.transport.Transport;
+import org.apache.log4j.PropertyConfigurator;
++import org.slf4j.Logger;
++import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.net.ServerSocket;
+import java.util.UUID;
+
+public class SimpleLdapDirectoryServer {
+
++ private static final Logger LOG = LoggerFactory.getLogger(SimpleLdapDirectoryServer.class);
++
+ private DirectoryServiceFactory factory;
+
+ private DirectoryService service;
+
+ private LdapServer server;
+
+ public SimpleLdapDirectoryServer( String rootDn, File usersLdif, Transport... transports ) throws Exception {
+ if( !usersLdif.exists() ) {
+ throw new FileNotFoundException( usersLdif.getAbsolutePath() );
+ }
+
- factory = new SimpleDirectoryServiceFactory();
++ DirectoryService directoryService = null;
++ try {
++ // creating the instance here so that
++ // we can set some properties like accesscontrol, anon access
++ // before starting up the service
++ directoryService = new SimpleDirectoryService();
++
++ // no need to register a shutdown hook during tests because this
++ // starts a lot of threads and slows down test execution
++ directoryService.setShutdownHookEnabled( false );
++ } catch ( Exception e ) {
++ throw new RuntimeException( e );
++ }
++
++ PartitionFactory partitionFactory = null;
++ try {
++ String typeName = System.getProperty( "apacheds.partition.factory" );
++
++ if ( typeName != null ) {
++ Class<? extends PartitionFactory> type = ( Class<? extends PartitionFactory> ) Class.forName( typeName );
++ partitionFactory = type.newInstance();
++ } else {
++ partitionFactory = new JdbmPartitionFactory();
++ }
++ } catch ( Exception e ) {
++ LOG.error( "Error instantiating custom partition factory", e );
++ throw new RuntimeException( e );
++ }
++
++ factory = new DefaultDirectoryServiceFactory( directoryService, partitionFactory );
+ factory.init( UUID.randomUUID().toString() );
+ service = factory.getDirectoryService();
+
+ enabledPosixSchema( service );
+
+ Partition partition = factory.getPartitionFactory().createPartition(
+ service.getSchemaManager(), service.getDnFactory(), "users", rootDn, 500,
+ service.getInstanceLayout().getInstanceDirectory() );
+ service.addPartition( partition );
+
+ CoreSession session = service.getAdminSession();
+ LdifFileLoader lfl = new LdifFileLoader( session, usersLdif, null );
+ lfl.execute();
+
+ server = new LdapServer();
+ server.setTransports( transports );
+ server.setDirectoryService( service );
+ }
+
+ private static void enabledPosixSchema( DirectoryService service ) throws LdapException {
+ service.getSchemaManager().getLoadedSchema( "nis" ).enable();
+ service.getAdminSession().modify(
+ new Dn( "cn=nis,ou=schema" ),
+ new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, "m-disabled", "FALSE" ) );
+ }
+
+ public void start() throws Exception {
+ service.startup();
+ server.start();
+ }
+
+ public void stop( boolean clean ) throws Exception {
+ server.stop();
+ service.shutdown();
+ if( clean ) {
+ FileUtils.deleteDirectory( service.getInstanceLayout().getInstanceDirectory() );
+ }
+ }
+
+ public static void main( String[] args ) throws Exception {
+ PropertyConfigurator.configure( System.getProperty( "log4j.configuration" ) );
+
+ SimpleLdapDirectoryServer ldap;
+
+ File file;
+ if ( args.length < 1 ) {
+ file = new File( "conf/users.ldif" );
+ } else {
+ File dir = new File( args[0] );
+ if( !dir.exists() || !dir.isDirectory() ) {
+ throw new FileNotFoundException( dir.getAbsolutePath() );
+ }
+ file = new File( dir, "users.ldif" );
+ }
+
+ if( !file.exists() || !file.canRead() ) {
+ throw new FileNotFoundException( file.getAbsolutePath() );
+ }
+
+ int port = 33389;
+
+ // Make sure the port is free.
+ ServerSocket socket = new ServerSocket( port );
+ socket.close();
+
+ TcpTransport transport = new TcpTransport( port );
+ ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", file, transport );
+ ldap.start();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index 70af903,0000000..dbc783d
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@@ -1,305 -1,0 +1,306 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.config.ConfigurationException;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+import org.apache.knox.gateway.topology.discovery.GatewayService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+ static final String TYPE = "AMBARI";
+
+ static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+ static final String AMBARI_HOSTROLES_URI =
+ AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+ static final String AMBARI_SERVICECONFIGS_URI =
+ AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
+ private static final String COMPONENT_CONFIG_MAPPING_FILE =
+ "ambari-service-discovery-component-config-mapping.properties";
+
+ private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+ // Map of component names to service configuration types
+ private static Map<String, String> componentServiceConfigs = new HashMap<>();
+ static {
+ try {
+ Properties configMapping = new Properties();
+ configMapping.load(AmbariServiceDiscovery.class.getClassLoader().getResourceAsStream(COMPONENT_CONFIG_MAPPING_FILE));
+ for (String componentName : configMapping.stringPropertyNames()) {
+ componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
+ }
+ } catch (Exception e) {
+ log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
+ }
+ }
+
+ private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+ private static final String DEFAULT_PWD_ALIAS = "ambari.discovery.password";
+
+ @GatewayService
+ private AliasService aliasService;
+
+ private CloseableHttpClient httpClient = null;
+
+
+ AmbariServiceDiscovery() {
+ httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+ }
+
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+
+ @Override
+ public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+ Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+ String discoveryAddress = config.getAddress();
+
+ // Invoke Ambari REST API to discover the available clusters
+ String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+ JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+ // Parse the cluster names from the response, and perform the cluster discovery
+ JSONArray clusterItems = (JSONArray) json.get("items");
+ for (Object clusterItem : clusterItems) {
+ String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+ try {
+ Cluster c = discover(config, clusterName);
+ clusters.put(clusterName, c);
+ } catch (Exception e) {
+ log.clusterDiscoveryError(clusterName, e);
+ }
+ }
+
+ return clusters;
+ }
+
+
+ @Override
+ public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+ AmbariCluster cluster = new AmbariCluster(clusterName);
+
+ Map<String, String> serviceComponents = new HashMap<>();
+
+ String discoveryAddress = config.getAddress();
+ String discoveryUser = config.getUser();
+ String discoveryPwdAlias = config.getPasswordAlias();
+
+ Map<String, List<String>> componentHostNames = new HashMap<>();
+ String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+ JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+ if (hostRolesJSON != null) {
+ // Process the host roles JSON
+ JSONArray items = (JSONArray) hostRolesJSON.get("items");
+ for (Object obj : items) {
+ JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+ for (Object component : components) {
+ JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+ for (Object hostComponent : hostComponents) {
+ JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+ String serviceName = (String) hostRoles.get("service_name");
+ String componentName = (String) hostRoles.get("component_name");
+
+ serviceComponents.put(componentName, serviceName);
+
+ // Assuming public host name is more applicable than host_name
+ String hostName = (String) hostRoles.get("public_host_name");
+ if (hostName == null) {
+ // Some (even slightly) older versions of Ambari/HDP do not return public_host_name,
+ // so fall back to host_name in those cases.
+ hostName = (String) hostRoles.get("host_name");
+ }
+
+ if (hostName != null) {
+ log.discoveredServiceHost(serviceName, hostName);
+ if (!componentHostNames.containsKey(componentName)) {
+ componentHostNames.put(componentName, new ArrayList<String>());
+ }
+ componentHostNames.get(componentName).add(hostName);
+ }
+ }
+ }
+ }
+ }
+
+ Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+ new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+ String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+ JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+ if (serviceConfigsJSON != null) {
+ // Process the service configurations
+ JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+ for (Object serviceConfig : serviceConfigs) {
+ String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+ JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+ for (Object configuration : configurations) {
+ String configType = (String) ((JSONObject) configuration).get("type");
+ String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+ Map<String, String> configProps = new HashMap<String, String>();
+ JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+ for (String propertyName : configProperties.keySet()) {
+ configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+ }
+ if (!serviceConfigurations.containsKey(serviceName)) {
+ serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+ }
+ serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+ cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+ }
+ }
+ }
+
+ // Construct the AmbariCluster model
+ for (String componentName : serviceComponents.keySet()) {
+ String serviceName = serviceComponents.get(componentName);
+ List<String> hostNames = componentHostNames.get(componentName);
+
+ Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+ String configType = componentServiceConfigs.get(componentName);
+ if (configType != null) {
+ AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+ AmbariComponent c = new AmbariComponent(componentName,
+ svcConfig.getVersion(),
+ clusterName,
+ serviceName,
+ hostNames,
+ svcConfig.getProperties());
+ cluster.addComponent(c);
+ }
+ }
+
+ return cluster;
+ }
+
+
+ protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+ JSONObject result = null;
+
+ CloseableHttpResponse response = null;
+ try {
+ HttpGet request = new HttpGet(url);
+
+ // If no configured username, then use default username alias
+ String password = null;
+ if (username == null) {
+ if (aliasService != null) {
+ try {
+ char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+ if (defaultUser != null) {
+ username = new String(defaultUser);
+ }
+ } catch (AliasServiceException e) {
+ log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+ }
+ }
+
+ // If username is still null
+ if (username == null) {
+ log.aliasServiceUserNotFound();
+ throw new ConfigurationException("No username is configured for Ambari service discovery.");
+ }
+ }
+
+ if (aliasService != null) {
- // If not password alias is configured, then try the default alias
++ // If no password alias is configured, then try the default alias
+ if (passwordAlias == null) {
+ passwordAlias = DEFAULT_PWD_ALIAS;
+ }
++
+ try {
+ char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+ if (pwd != null) {
+ password = new String(pwd);
+ }
+
+ } catch (AliasServiceException e) {
+ log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+ }
+ }
+
+ // If the password could not be determined
+ if (password == null) {
+ log.aliasServicePasswordNotFound();
+ throw new ConfigurationException("No password is configured for Ambari service discovery.");
+ }
+
+ // Add an auth header if credentials are available
+ String encodedCreds =
+ org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+ request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+ response = httpClient.execute(request);
+
+ if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+ HttpEntity entity = response.getEntity();
+ if (entity != null) {
+ result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+ log.debugJSON(result.toJSONString());
+ } else {
+ log.noJSON(url);
+ }
+ } else {
+ log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+ }
+
+ } catch (IOException e) {
+ log.restInvocationError(url, e);
+ } finally {
+ if(response != null) {
+ try {
+ response.close();
+ } catch (IOException e) {
+ // Ignore
+ }
+ }
+ }
+ return result;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
index 4cc86ae,0000000..3c9cf11
mode 100644,000000..100644
--- a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
+++ b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexIdentityAssertionFilter.java
@@@ -1,88 -1,0 +1,90 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.identityasserter.regex.filter;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+
+import org.apache.knox.gateway.identityasserter.common.filter.CommonIdentityAssertionFilter;
+import org.apache.knox.gateway.security.principal.PrincipalMappingException;
+
+import java.util.Map;
+import java.util.StringTokenizer;
+import java.util.TreeMap;
++import java.lang.Boolean;
+
+public class RegexIdentityAssertionFilter extends
+ CommonIdentityAssertionFilter {
+
+ private String input = null;
+ private String output = null;
+ private Map<String,String> dict;
+ RegexTemplate template;
+
+ @Override
+ public void init(FilterConfig filterConfig) throws ServletException {
+ super.init(filterConfig);
+ try {
+ input = filterConfig.getInitParameter( "input" );
+ if( input == null ) {
+ input = "";
+ }
+ output = filterConfig.getInitParameter( "output" );
+ if( output == null ) {
+ output = "";
+ }
+ dict = loadDictionary( filterConfig.getInitParameter( "lookup" ) );
- template = new RegexTemplate( input, output, dict );
++ boolean useOriginalOnLookupFailure = Boolean.parseBoolean(filterConfig.getInitParameter("use.original.on.lookup.failure"));
++ template = new RegexTemplate( input, output, dict, useOriginalOnLookupFailure);
+ } catch ( PrincipalMappingException e ) {
+ throw new ServletException( e );
+ }
+ }
+
+ public String[] mapGroupPrincipals(String mappedPrincipalName, Subject subject) {
+ // Returning null will allow existing Subject group principals to remain the same
+ return null;
+ }
+
+ public String mapUserPrincipal(String principalName) {
+ return template.apply( principalName );
+ }
+
+ private Map<String, String> loadDictionary( String config ) throws PrincipalMappingException {
+ Map<String,String> dict = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ if( config != null && !config.isEmpty() ) {
+ try {
+ StringTokenizer t = new StringTokenizer( config, ";" );
+ while( t.hasMoreTokens() ) {
+ String nvp = t.nextToken();
+ String[] a = nvp.split( "=" );
+ dict.put( a[0].trim(), a[1].trim() );
+ }
+ return dict;
+ } catch( Exception e ) {
+ dict.clear();
+ throw new PrincipalMappingException(
+ "Unable to load lookup dictionary from provided configuration: " + config +
+ ". No principal mapping will be provided.", e );
+ }
+ }
+ return dict;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplate.java
----------------------------------------------------------------------
diff --cc gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplate.java
index e8f108e,0000000..659d3df
mode 100644,000000..100644
--- a/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplate.java
+++ b/gateway-provider-identity-assertion-regex/src/main/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplate.java
@@@ -1,75 -1,0 +1,79 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.identityasserter.regex.filter;
+
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class RegexTemplate {
+
+ private static Pattern directPattern = Pattern.compile( "\\{(\\[?\\d+?\\]?)\\}" );
+ private static Pattern indirectPattern = Pattern.compile( "\\[(\\d+?)\\]" );
+
+ Pattern inputPattern;
+ String outputTemplate;
+ Map<String,String> lookupTable;
++ boolean useOriginalOnLookupFailure;
+
+ public RegexTemplate( String regex, String template ) {
- this( regex, template, null );
++ this( regex, template, null, false );
+ }
+
- public RegexTemplate( String regex, String template, Map<String,String> map ) {
++ public RegexTemplate( String regex, String template, Map<String,String> map, boolean useOriginalOnLookupFailure ) {
+ this.inputPattern = Pattern.compile( regex );
+ this.outputTemplate = template;
+ this.lookupTable = map;
++ this.useOriginalOnLookupFailure = useOriginalOnLookupFailure;
+ }
+
+ public String apply( String input ) {
+ String output = outputTemplate;
+ Matcher inputMatcher = inputPattern.matcher( input );
+ if( inputMatcher.find() ) {
+ output = expandTemplate( inputMatcher, output );
+ }
+ return output;
+ }
+
+ private String expandTemplate( Matcher inputMatcher, String output ) {
+ Matcher directMatcher = directPattern.matcher( output );
+ while( directMatcher.find() ) {
++ String lookupKey = null;
+ String lookupValue = null;
+ String lookupStr = directMatcher.group( 1 );
+ Matcher indirectMatcher = indirectPattern.matcher( lookupStr );
+ if( indirectMatcher.find() ) {
+ lookupStr = indirectMatcher.group( 1 );
+ int lookupIndex = Integer.parseInt( lookupStr );
+ if( lookupTable != null ) {
- String lookupKey = inputMatcher.group( lookupIndex );
++ lookupKey = inputMatcher.group( lookupIndex );
+ lookupValue = lookupTable.get( lookupKey );
+ }
+ } else {
+ int lookupIndex = Integer.parseInt( lookupStr );
+ lookupValue = inputMatcher.group( lookupIndex );
+ }
- output = directMatcher.replaceFirst( lookupValue == null ? "" : lookupValue );
++ String replaceWith = this.useOriginalOnLookupFailure ? lookupKey : "" ;
++ output = directMatcher.replaceFirst( lookupValue == null ? replaceWith : lookupValue );
+ directMatcher = directPattern.matcher( output );
+ }
+ return output;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-provider-identity-assertion-regex/src/test/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplateTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-identity-assertion-regex/src/test/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplateTest.java
index 3c3b06f,0000000..49630be
mode 100644,000000..100644
--- a/gateway-provider-identity-assertion-regex/src/test/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplateTest.java
+++ b/gateway-provider-identity-assertion-regex/src/test/java/org/apache/knox/gateway/identityasserter/regex/filter/RegexTemplateTest.java
@@@ -1,72 -1,0 +1,93 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.identityasserter.regex.filter;
+
+import org.junit.Test;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+
+public class RegexTemplateTest {
+
+ @Test
+ public void testExtractUsernameFromEmailAddress() {
+
+ RegexTemplate template;
+ String actual;
+
+ template = new RegexTemplate( "(.*)@.*", "prefix_{1}_suffix" );
+ actual = template.apply( "member@apache.org" );
+ assertThat( actual, is( "prefix_member_suffix" ) );
+
+ template = new RegexTemplate( "(.*)@.*", "prefix_{0}_suffix" );
+ actual = template.apply( "member@apache.org" );
+ assertThat( actual, is( "prefix_member@apache.org_suffix" ) );
+
+ template = new RegexTemplate( "(.*)@.*", "prefix_{1}_{a}_suffix" );
+ actual = template.apply( "member@apache.org" );
+ assertThat( actual, is( "prefix_member_{a}_suffix" ) );
+
+ }
+
+ @Test
+ public void testExtractUsernameFromEmailAddressAndMapDomain() {
+
+ RegexTemplate template;
+ Map<String,String> map = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+ map.put( "us", "USA" );
+ map.put( "ca", "CANADA" );
+
+ String actual;
+
- template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map );
++ template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map, false );
+ actual = template.apply( "member@us.apache.org" );
+ assertThat( actual, is( "prefix_member:USA_suffix" ) );
+
+ actual = template.apply( "member@ca.apache.org" );
+ assertThat( actual, is( "prefix_member:CANADA_suffix" ) );
+
+ actual = template.apply( "member@nj.apache.org" );
+ assertThat( actual, is( "prefix_member:_suffix" ) );
+
+ }
+
++ @Test
++ public void testLookupFailure() {
++
++ RegexTemplate template;
++ Map<String,String> map = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
++ map.put( "us", "USA" );
++ map.put( "ca", "CANADA" );
++
++ String actual;
++
++ template = new RegexTemplate( "(.*)@(.*?)\\..*", "prefix_{1}:{[2]}_suffix", map, true );
++ actual = template.apply( "member@us.apache.org" );
++ assertThat( actual, is( "prefix_member:USA_suffix" ) );
++
++ actual = template.apply( "member@ca.apache.org" );
++ assertThat( actual, is( "prefix_member:CANADA_suffix" ) );
++
++ actual = template.apply( "member@nj.apache.org" );
++ assertThat( actual, is( "prefix_member:nj_suffix" ) );
++
++ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
index a182b37,0000000..17fb8c2
mode 100644,000000..100644
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/deploy/WebAppSecContributor.java
@@@ -1,107 -1,0 +1,118 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.webappsec.deploy;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.knox.gateway.deploy.DeploymentContext;
+import org.apache.knox.gateway.deploy.ProviderDeploymentContributorBase;
+import org.apache.knox.gateway.descriptor.FilterParamDescriptor;
+import org.apache.knox.gateway.descriptor.ResourceDescriptor;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Service;
+
+public class WebAppSecContributor extends
+ ProviderDeploymentContributorBase {
+ private static final String ROLE = "webappsec";
+ private static final String NAME = "WebAppSec";
+ private static final String CSRF_SUFFIX = "_CSRF";
+ private static final String CSRF_FILTER_CLASSNAME = "org.apache.knox.gateway.webappsec.filter.CSRFPreventionFilter";
+ private static final String CSRF_ENABLED = "csrf.enabled";
+ private static final String CORS_SUFFIX = "_CORS";
+ private static final String CORS_FILTER_CLASSNAME = "com.thetransactioncompany.cors.CORSFilter";
+ private static final String CORS_ENABLED = "cors.enabled";
+ private static final String XFRAME_OPTIONS_SUFFIX = "_XFRAMEOPTIONS";
+ private static final String XFRAME_OPTIONS_FILTER_CLASSNAME = "org.apache.knox.gateway.webappsec.filter.XFrameOptionsFilter";
+ private static final String XFRAME_OPTIONS_ENABLED = "xframe.options.enabled";
++ private static final String STRICT_TRANSPORT_SUFFIX = "_STRICTTRANSPORT";
++ private static final String STRICT_TRANSPORT_FILTER_CLASSNAME = "org.apache.knox.gateway.webappsec.filter.StrictTranportFilter";
++ private static final String STRICT_TRANSPORT_ENABLED = "strict.transport.enabled";
+
+
+ @Override
+ public String getRole() {
+ return ROLE;
+ }
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ @Override
+ public void initializeContribution(DeploymentContext context) {
+ super.initializeContribution(context);
+ }
+
+ @Override
+ public void contributeFilter(DeploymentContext context, Provider provider, Service service,
+ ResourceDescriptor resource, List<FilterParamDescriptor> params) {
+
+ Provider webappsec = context.getTopology().getProvider(ROLE, NAME);
+ if (webappsec != null && webappsec.isEnabled()) {
+ Map<String,String> map = provider.getParams();
+ if (params == null) {
+ params = new ArrayList<FilterParamDescriptor>();
+ }
+
+ Map<String, String> providerParams = provider.getParams();
+ // CORS support
+ String corsEnabled = map.get(CORS_ENABLED);
+ if ( corsEnabled != null && "true".equals(corsEnabled)) {
+ provisionConfig(resource, providerParams, params, "cors.");
+ resource.addFilter().name( getName() + CORS_SUFFIX ).role( getRole() ).impl( CORS_FILTER_CLASSNAME ).params( params );
+ }
+
+ // CSRF
+ params = new ArrayList<FilterParamDescriptor>();
+ String csrfEnabled = map.get(CSRF_ENABLED);
+ if ( csrfEnabled != null && "true".equals(csrfEnabled)) {
+ provisionConfig(resource, providerParams, params, "csrf.");
+ resource.addFilter().name( getName() + CSRF_SUFFIX ).role( getRole() ).impl( CSRF_FILTER_CLASSNAME ).params( params );
+ }
+
+ // X-Frame-Options - clickjacking protection
+ params = new ArrayList<FilterParamDescriptor>();
+ String xframeOptionsEnabled = map.get(XFRAME_OPTIONS_ENABLED);
+ if ( xframeOptionsEnabled != null && "true".equals(xframeOptionsEnabled)) {
+ provisionConfig(resource, providerParams, params, "xframe.");
+ resource.addFilter().name( getName() + XFRAME_OPTIONS_SUFFIX ).role( getRole() ).impl( XFRAME_OPTIONS_FILTER_CLASSNAME ).params( params );
+ }
++
++ // HTTP Strict-Transport-Security
++ params = new ArrayList<FilterParamDescriptor>();
++ String strictTranportEnabled = map.get(STRICT_TRANSPORT_ENABLED);
++ if ( strictTranportEnabled != null && "true".equals(strictTranportEnabled)) {
++ provisionConfig(resource, providerParams, params, "strict.");
++ resource.addFilter().name( getName() + STRICT_TRANSPORT_SUFFIX).role( getRole() ).impl(STRICT_TRANSPORT_FILTER_CLASSNAME).params( params );
++ }
+ }
+ }
+
+ private void provisionConfig(ResourceDescriptor resource, Map<String,String> providerParams,
+ List<FilterParamDescriptor> params, String prefix) {
+ for(Entry<String, String> entry : providerParams.entrySet()) {
+ if (entry.getKey().startsWith(prefix)) {
+ params.add( resource.createFilterParam().name( entry.getKey().toLowerCase() ).value( entry.getValue() ) );
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-release/home/conf/topologies/manager.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/c754cc06/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
index 5d7c5db,0000000..8dd29bf
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/GatewayFilter.java
@@@ -1,390 -1,0 +1,453 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway;
+
+import org.apache.knox.gateway.audit.api.Action;
+import org.apache.knox.gateway.audit.api.ActionOutcome;
+import org.apache.knox.gateway.audit.api.AuditContext;
+import org.apache.knox.gateway.audit.api.AuditService;
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.audit.api.Auditor;
+import org.apache.knox.gateway.audit.api.CorrelationContext;
+import org.apache.knox.gateway.audit.api.CorrelationServiceFactory;
+import org.apache.knox.gateway.audit.api.ResourceType;
+import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.filter.AbstractGatewayFilter;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.i18n.resources.ResourcesFactory;
++import org.apache.knox.gateway.topology.Topology;
+import org.apache.knox.gateway.util.urltemplate.Matcher;
+import org.apache.knox.gateway.util.urltemplate.Parser;
+import org.apache.knox.gateway.util.urltemplate.Template;
+
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
++import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+
+/**
+ *
+ */
+public class GatewayFilter implements Filter {
+
+ private static final FilterChain EMPTY_CHAIN = new FilterChain() {
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse ) throws IOException, ServletException {
+ }
+ };
+
+ private static final GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
+ private static final GatewayResources RES = ResourcesFactory.get( GatewayResources.class );
+ private static AuditService auditService = AuditServiceFactory.getAuditService();
+ private static Auditor auditor = auditService.getAuditor(
+ AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
+ AuditConstants.KNOX_COMPONENT_NAME );
+
+ private Set<Holder> holders;
+ private Matcher<Chain> chains;
+ private FilterConfig config;
+
+ public GatewayFilter() {
+ holders = new HashSet<>();
+ chains = new Matcher<Chain>();
+ }
+
+ @Override
+ public void init( FilterConfig filterConfig ) throws ServletException {
+ this.config = filterConfig;
+ }
+
+ @Override
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
+ doFilter( servletRequest, servletResponse );
+ if( filterChain != null ) {
+ filterChain.doFilter( servletRequest, servletResponse );
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse ) throws IOException, ServletException {
+ HttpServletRequest httpRequest = (HttpServletRequest)servletRequest;
+ HttpServletResponse httpResponse = (HttpServletResponse)servletResponse;
+
+ //TODO: The resulting pathInfo + query needs to be added to the servlet context somehow so that filters don't need to rebuild it. This is done in HttpClientDispatch right now for example.
+ String servlet = httpRequest.getServletPath();
+ String path = httpRequest.getPathInfo();
+ String query = httpRequest.getQueryString();
+ String requestPath = ( servlet == null ? "" : servlet ) + ( path == null ? "" : path );
+ String requestPathWithQuery = requestPath + ( query == null ? "" : "?" + query );
+
+ Template pathWithQueryTemplate;
+ try {
+ pathWithQueryTemplate = Parser.parseLiteral( requestPathWithQuery );
+ } catch( URISyntaxException e ) {
+ throw new ServletException( e );
+ }
+ String contextWithPathAndQuery = httpRequest.getContextPath() + requestPathWithQuery;
+ LOG.receivedRequest( httpRequest.getMethod(), requestPath );
+
+ servletRequest.setAttribute(
+ AbstractGatewayFilter.SOURCE_REQUEST_URL_ATTRIBUTE_NAME, pathWithQueryTemplate );
+ servletRequest.setAttribute(
+ AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME, contextWithPathAndQuery );
+
+ Matcher<Chain>.Match match = chains.match( pathWithQueryTemplate );
-
++
++ // if there was no match then look for a default service for the topology
++ if (match == null) {
++ Topology topology = (Topology) servletRequest.getServletContext().getAttribute("org.apache.hadoop.gateway.topology");
++ if (topology != null) {
++ String defaultServicePath = topology.getDefaultServicePath();
++ if (defaultServicePath != null) {
++ try {
++ String newPathWithQuery = defaultServicePath + "/" + pathWithQueryTemplate;
++ match = chains.match(Parser.parseLiteral(newPathWithQuery));
++ String origUrl = ((HttpServletRequest) servletRequest).getRequestURL().toString();
++ String url = origUrl;
++ if (path.equals("/")) {
++ url += defaultServicePath;
++ }
++ else {
++ int index = origUrl.indexOf(path);
++ url = origUrl.substring(0, index) + "/" + defaultServicePath + path;
++ }
++ String contextPath = defaultServicePath;
++ servletRequest = new ForwardedRequest((HttpServletRequest) servletRequest,
++ contextPath,
++ url);
++ } catch (URISyntaxException e) {
++ throw new ServletException( e );
++ }
++ }
++ }
++ }
++
+ assignCorrelationRequestId();
+ // Populate Audit/correlation parameters
+ AuditContext auditContext = auditService.getContext();
+ auditContext.setTargetServiceName( match == null ? null : match.getValue().getResourceRole() );
+ auditContext.setRemoteIp( getRemoteAddress(servletRequest) );
+ auditContext.setRemoteHostname( servletRequest.getRemoteHost() );
+ auditor.audit(
+ Action.ACCESS, contextWithPathAndQuery, ResourceType.URI,
+ ActionOutcome.UNAVAILABLE, RES.requestMethod(((HttpServletRequest)servletRequest).getMethod()));
+
+ if( match != null ) {
+ Chain chain = match.getValue();
+ servletRequest.setAttribute( AbstractGatewayFilter.TARGET_SERVICE_ROLE, chain.getResourceRole() );
+ try {
+ chain.doFilter( servletRequest, servletResponse );
+ } catch( IOException e ) {
+ LOG.failedToExecuteFilter( e );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
+ throw e;
+ } catch( ServletException e ) {
+ LOG.failedToExecuteFilter( e );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
+ throw e;
+ } catch( RuntimeException e ) {
+ LOG.failedToExecuteFilter( e );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
+ throw e;
+ } catch( ThreadDeath e ) {
+ LOG.failedToExecuteFilter( e );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
+ throw e;
+ } catch( Throwable e ) {
+ LOG.failedToExecuteFilter( e );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
+ throw new ServletException( e );
+ }
+ } else {
+ LOG.failedToMatchPath( requestPath );
+ httpResponse.setStatus( HttpServletResponse.SC_NOT_FOUND );
+ }
+ //KAM[ Don't do this or the Jetty default servlet will overwrite any response setup by the filter.
+ // filterChain.doFilter( servletRequest, servletResponse );
+ //]
+ }
+
+ private String getRemoteAddress(ServletRequest servletRequest) {
+ GatewayConfig gatewayConfig =
+ (GatewayConfig) servletRequest.getServletContext().
+ getAttribute(GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE);
+
+ String addrHeaderName = gatewayConfig.getHeaderNameForRemoteAddress();
+ String addr = ((HttpServletRequest)servletRequest).getHeader(addrHeaderName);
+ if (addr == null || addr.trim().isEmpty()) {
+ addr = servletRequest.getRemoteAddr();
+ }
+ return addr;
+ }
+
+ @Override
+ public void destroy() {
+ for( Holder holder : holders ) {
+ holder.destroy();
+ }
+ }
+
+ private void addHolder( Holder holder ) {
+ holders.add( holder );
+ Chain chain = chains.get( holder.template );
+ if( chain == null ) {
+ chain = new Chain();
+ chain.setResourceRole( holder.getResourceRole() );
+ chains.add( holder.template, chain );
+ }
+ chain.chain.add( holder );
+ }
+
+ public void addFilter( String path, String name, Filter filter, Map<String,String> params, String resourceRole ) throws URISyntaxException {
+ Holder holder = new Holder( path, name, filter, params, resourceRole );
+ addHolder( holder );
+ }
+
+// public void addFilter( String path, String name, Class<RegexDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
+// Holder holder = new Holder( path, name, clazz, params );
+// addHolder( holder );
+// }
+
+ public void addFilter( String path, String name, String clazz, Map<String,String> params, String resourceRole ) throws URISyntaxException {
+ Holder holder = new Holder( path, name, clazz, params, resourceRole );
+ addHolder( holder );
+ }
+
+ // Now creating the correlation context only if required since it may be created upstream in the CorrelationHandler.
+ private void assignCorrelationRequestId() {
+ CorrelationContext correlationContext = CorrelationServiceFactory.getCorrelationService().getContext();
+ if( correlationContext == null ) {
+ correlationContext = CorrelationServiceFactory.getCorrelationService().createContext();
+ }
+ String requestId = correlationContext.getRequestId();
+ if( requestId == null ) {
+ correlationContext.setRequestId( UUID.randomUUID().toString() );
+ }
+ }
+
+ private class Chain implements FilterChain {
+
+ private List<Holder> chain;
+ private String resourceRole;
+
+ private Chain() {
+ this.chain = new ArrayList<Holder>();
+ }
+
+ private Chain( List<Holder> chain ) {
+ this.chain = chain;
+ }
+
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse ) throws IOException, ServletException {
+ if( chain != null && !chain.isEmpty() ) {
+ final Filter filter = chain.get( 0 );
+ final FilterChain chain = subChain();
+ filter.doFilter( servletRequest, servletResponse, chain );
+ }
+ }
+
+ private FilterChain subChain() {
+ if( chain != null && chain.size() > 1 ) {
+ return new Chain( chain.subList( 1, chain.size() ) );
+ } else {
+ return EMPTY_CHAIN;
+ }
+ }
+
+ private String getResourceRole() {
+ return resourceRole;
+ }
+
+ private void setResourceRole( String resourceRole ) {
+ this.resourceRole = resourceRole;
+ }
+
+ }
+
+ private class Holder implements Filter, FilterConfig {
+// private String path;
+ private Template template;
+ private String name;
+ private Map<String,String> params;
+ private Filter instance;
+ private Class<? extends Filter> clazz;
+ private String type;
+ private String resourceRole;
+
+ private Holder( String path, String name, Filter filter, Map<String,String> params, String resourceRole ) throws URISyntaxException {
+// this.path = path;
+ this.template = Parser.parseTemplate( path );
+ this.name = name;
+ this.params = params;
+ this.instance = filter;
+ this.clazz = filter.getClass();
+ this.type = clazz.getCanonicalName();
+ this.resourceRole = resourceRole;
+ }
+
+// private Holder( String path, String name, Class<RegexDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
+// this.path = path;
+// this.template = Parser.parse( path );
+// this.name = name;
+// this.params = params;
+// this.instance = null;
+// this.clazz = clazz;
+// this.type = clazz.getCanonicalName();
+// }
+
+ private Holder( String path, String name, String clazz, Map<String,String> params, String resourceRole ) throws URISyntaxException {
+// this.path = path;
+ this.template = Parser.parseTemplate( path );
+ this.name = name;
+ this.params = params;
+ this.instance = null;
+ this.clazz = null;
+ this.type = clazz;
+ this.resourceRole = resourceRole;
+ }
+
+ @Override
+ public String getFilterName() {
+ return name;
+ }
+
+ @Override
+ public ServletContext getServletContext() {
+ return GatewayFilter.this.config.getServletContext();
+ }
+
+ @Override
+ public String getInitParameter( String name ) {
+ String value = null;
+ if( params != null ) {
+ value = params.get( name );
+ }
+ return value;
+ }
+
+ @Override
+ public Enumeration<String> getInitParameterNames() {
+ Enumeration<String> names = null;
+ if( params != null ) {
+ names = Collections.enumeration( params.keySet() );
+ }
+ return names;
+ }
+
+ @Override
+ public void init( FilterConfig filterConfig ) throws ServletException {
+ getInstance().init( filterConfig );
+ }
+
+ @Override
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
+ final Filter filter = getInstance();
+ filter.doFilter( servletRequest, servletResponse, filterChain );
+ }
+
+ @Override
+ public void destroy() {
+ if( instance != null ) {
+ instance.destroy();
+ instance = null;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private Class<? extends Filter> getClazz() throws ClassNotFoundException {
+ if( clazz == null ) {
+ ClassLoader loader = Thread.currentThread().getContextClassLoader();
+ if( loader == null ) {
+ loader = this.getClass().getClassLoader();
+ }
+ clazz = (Class)loader.loadClass( type );
+ }
+ return clazz;
+ }
+
+ private Filter getInstance() throws ServletException {
+ if( instance == null ) {
+ try {
+ if( clazz == null ) {
+ clazz = getClazz();
+ }
+ instance = clazz.newInstance();
+ instance.init( this );
+ } catch( Exception e ) {
+ throw new ServletException( e );
+ }
+ }
+ return instance;
+ }
+
+ private String getResourceRole() {
+ return resourceRole;
+ }
+
+ }
+
++ /**
++ * A request wrapper class that wraps a request and adds the context path if
++ * needed.
++ */
++ static class ForwardedRequest extends HttpServletRequestWrapper {
++
++ private String newURL;
++ private String contextpath;
++
++ public ForwardedRequest(final HttpServletRequest request,
++ final String contextpath, final String newURL) {
++ super(request);
++ this.newURL = newURL;
++ this.contextpath = contextpath;
++ }
++
++ @Override
++ public StringBuffer getRequestURL() {
++ return new StringBuffer(newURL);
++ }
++
++ @Override
++ public String getRequestURI() {
++ return newURL;
++ }
++
++ @Override
++ public String getContextPath() {
++ return super.getContextPath() + "/" + this.contextpath;
++ }
++
++ }
+}
[02/25] knox git commit: KNOX-1049 - change defaultServicePath to path
Posted by mo...@apache.org.
KNOX-1049 - change defaultServicePath to path
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/222385bf
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/222385bf
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/222385bf
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 222385bf85bb0c234ca5da981f575ee8db0cd2d3
Parents: 3a41155
Author: Larry McCay <lm...@hortonworks.com>
Authored: Wed Oct 25 13:14:02 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Wed Oct 25 13:14:02 2017 -0400
----------------------------------------------------------------------
.../hadoop/gateway/service/admin/beans/Topology.java | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/222385bf/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
index 8bc5fa7..a197c27 100644
--- a/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
+++ b/gateway-service-admin/src/main/java/org/apache/hadoop/gateway/service/admin/beans/Topology.java
@@ -34,7 +34,7 @@ public class Topology {
private String name;
@XmlElement
- private String defaultServicePath;
+ private String path;
@XmlElement
private long timestamp;
@@ -72,12 +72,12 @@ public class Topology {
return timestamp;
}
- public void setDefaultService( String defaultServicePath ) {
- this.defaultServicePath = defaultServicePath;
+ public void setPath( String defaultServicePath ) {
+ this.path = defaultServicePath;
}
- public String getDefaultService() {
- return defaultServicePath;
+ public String getPath() {
+ return path;
}
public void setTimestamp( long timestamp ) {