You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by km...@apache.org on 2016/02/25 21:54:21 UTC
[1/5] knox git commit: [KNOX-670] - Knox should be able to sost
simple web apps
Repository: knox
Updated Branches:
refs/heads/master 82539e402 -> a70a3b56c
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/rewrite.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/rewrite.xml
new file mode 100644
index 0000000..656e229
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/rewrite.xml
@@ -0,0 +1,17 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules/>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/service.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/service.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/service.xml
new file mode 100644
index 0000000..aa16c2c
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest/test-apps/minimal-test-app/service.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="MINIMAL_TEST_APP" name="minimal-test-app" version="1.0.0">
+ <routes>
+ <route path="/**?**"/>
+ </routes>
+</service>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-util-common/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-util-common/pom.xml b/gateway-util-common/pom.xml
index 736cc6b..1cbe38d 100644
--- a/gateway-util-common/pom.xml
+++ b/gateway-util-common/pom.xml
@@ -48,10 +48,14 @@
</build>
<dependencies>
- <dependency>
- <groupId>com.fasterxml.jackson.core</groupId>
- <artifactId>jackson-databind</artifactId>
- </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.core</groupId>
+ <artifactId>jackson-databind</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>commons-codec</groupId>
+ <artifactId>commons-codec</artifactId>
+ </dependency>
<!-- ********** ********** ********** ********** ********** ********** -->
<!-- ********** Test Dependencies ********** -->
@@ -66,7 +70,7 @@
<groupId>${gateway-group}</groupId>
<artifactId>gateway-i18n</artifactId>
</dependency>
- <dependency>
+ <dependency>
<groupId>org.apache.directory.server</groupId>
<artifactId>apacheds-jdbm</artifactId>
</dependency>
@@ -97,7 +101,7 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
- <dependency>
+ <dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/Urls.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/Urls.java b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/Urls.java
index 4d31150..f10b0d6 100644
--- a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/Urls.java
+++ b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/Urls.java
@@ -17,11 +17,15 @@
*/
package org.apache.hadoop.gateway.util;
+import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.commons.codec.DecoderException;
+import org.apache.commons.codec.net.URLCodec;
+
/**
*
*/
@@ -108,4 +112,56 @@ public class Urls {
}
return domain.substring(idx);
}
+
+ public static String encode( String str ) {
+ URLCodec codec = new URLCodec();
+ try {
+ return codec.encode( str, "UTF-8" );
+ } catch( UnsupportedEncodingException e ) {
+ throw new IllegalArgumentException( e );
+ }
+ }
+
+ public static String decode( String str ) {
+ URLCodec codec = new URLCodec();
+ try {
+ return codec.decode( str, "UTF-8" );
+ } catch( UnsupportedEncodingException e ) {
+ throw new IllegalArgumentException( e );
+ } catch( DecoderException e ) {
+ throw new IllegalArgumentException( e );
+ }
+ }
+
+ public static String trimLeadingAndTrailingSlash( String s ) {
+ if( s == null ) {
+ return "";
+ } else {
+ int b = 0;
+ int e = s.length();
+ while( b < e && s.charAt( b ) == '/' ) { b++; }
+ while( e > b && s.charAt( e-1 ) == '/' ) { e--; }
+ return s.substring( b, e );
+ }
+ }
+
+ public static String trimLeadingAndTrailingSlashJoin( String... parts ) {
+ StringBuilder s = new StringBuilder();
+ if( parts != null ) {
+ String p = "";
+ String n = "";
+ for( int i=0; i<parts.length; i++ ) {
+ n = trimLeadingAndTrailingSlash( parts[i] );
+ if( !n.isEmpty() ) {
+ if( !p.isEmpty() ) {
+ s.append( '/' );
+ }
+ s.append( n );
+ p = n;
+ }
+ }
+ }
+ return s.toString();
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/XmlUtils.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/XmlUtils.java b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/XmlUtils.java
new file mode 100644
index 0000000..1536793
--- /dev/null
+++ b/gateway-util-common/src/main/java/org/apache/hadoop/gateway/util/XmlUtils.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.util;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.Writer;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.w3c.dom.Document;
+import org.xml.sax.SAXException;
+
+public class XmlUtils {
+
+ public static Document readXml( File file ) throws ParserConfigurationException, IOException, SAXException {
+ DocumentBuilderFactory f = DocumentBuilderFactory.newInstance();
+ DocumentBuilder b = f.newDocumentBuilder();
+ Document d = b.parse( file );
+ return d;
+ }
+
+ public static void writeXml( Document document, Writer writer ) throws TransformerException {
+ TransformerFactory f = TransformerFactory.newInstance();
+// if( f.getClass().getPackage().getName().equals( "com.sun.org.apache.xalan.internal.xsltc.trax" ) ) {
+ f.setAttribute( "indent-number", 4 );
+// }
+ Transformer t = f.newTransformer();
+ t.setOutputProperty( OutputKeys.INDENT, "yes" );
+ t.setOutputProperty( "{xml.apache.org/xslt}indent-amount", "4" );
+ DOMSource s = new DOMSource( document );
+ StreamResult r = new StreamResult( writer );
+ t.transform( s, r );
+ }
+
+ public static void writeXml( Document document, File file ) throws TransformerException, IOException {
+ writeXml( document, new FileWriter( file ) );
+ }
+
+
+ public static Document createDocument() throws ParserConfigurationException {
+ DocumentBuilderFactory f = DocumentBuilderFactory.newInstance();
+ DocumentBuilder b = f.newDocumentBuilder();
+ Document d = b.newDocument();
+ d.setXmlStandalone( true );
+ return d;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-util-common/src/test/java/org/apache/hadoop/gateway/util/UrlsTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-common/src/test/java/org/apache/hadoop/gateway/util/UrlsTest.java b/gateway-util-common/src/test/java/org/apache/hadoop/gateway/util/UrlsTest.java
index f55c79c..ac6ce93 100644
--- a/gateway-util-common/src/test/java/org/apache/hadoop/gateway/util/UrlsTest.java
+++ b/gateway-util-common/src/test/java/org/apache/hadoop/gateway/util/UrlsTest.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.gateway.util;
import org.junit.Assert;
import org.junit.Test;
+import static junit.framework.TestCase.assertEquals;
+
public class UrlsTest {
/**
@@ -54,4 +56,30 @@ public class UrlsTest {
// ip addresses can not be wildcarded - may be a completely different domain
Assert.assertEquals(Urls.getDomainName("http://127.0.0.1", null), null);
}
+
+ @Test
+ public void testTrimLeadingAndTrailingSlash() {
+ assertEquals( "", Urls.trimLeadingAndTrailingSlash( null ) );
+ assertEquals( "", Urls.trimLeadingAndTrailingSlash( "" ) );
+ assertEquals( "", Urls.trimLeadingAndTrailingSlash( "/" ) );
+ assertEquals( "", Urls.trimLeadingAndTrailingSlash( "//" ) );
+ assertEquals( "x", Urls.trimLeadingAndTrailingSlash( "x" ) );
+ assertEquals( "x/x", Urls.trimLeadingAndTrailingSlash( "x/x" ) );
+ assertEquals( "x", Urls.trimLeadingAndTrailingSlash( "/x/" ) );
+ assertEquals( "x/x", Urls.trimLeadingAndTrailingSlash( "x/x" ) );
+ }
+
+ @Test
+ public void testTrimLeadingAndTrailingSlashJoin() throws Exception {
+ assertEquals( "", Urls.trimLeadingAndTrailingSlashJoin( null ) );
+ assertEquals( "", Urls.trimLeadingAndTrailingSlashJoin( "" ) );
+ assertEquals( "", Urls.trimLeadingAndTrailingSlashJoin( "", "" ) );
+ assertEquals( "x", Urls.trimLeadingAndTrailingSlashJoin( "x" ) );
+ assertEquals( "x", Urls.trimLeadingAndTrailingSlashJoin( "x", "" ) );
+ assertEquals( "x", Urls.trimLeadingAndTrailingSlashJoin( "", "x", "" ) );
+ assertEquals( "x/x", Urls.trimLeadingAndTrailingSlashJoin( "", "x", "", "", "x" ) );
+ assertEquals( "x/x", Urls.trimLeadingAndTrailingSlashJoin( null, "x", null, null, "x" ) );
+ assertEquals( "x/y/z", Urls.trimLeadingAndTrailingSlashJoin( "x", "y", "z" ) );
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-util-urltemplate/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/pom.xml b/gateway-util-urltemplate/pom.xml
index df91f2e..2f5c309 100644
--- a/gateway-util-urltemplate/pom.xml
+++ b/gateway-util-urltemplate/pom.xml
@@ -64,18 +64,18 @@
<scope>test</scope>
</dependency>
- <!--
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<scope>test</scope>
</dependency>
- -->
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
<scope>test</scope>
</dependency>
+ -->
<dependency>
<groupId>org.hamcrest</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c180475..c87486a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -99,7 +99,7 @@
<gateway-version>0.9.0-SNAPSHOT</gateway-version>
<gateway-group>org.apache.knox</gateway-group>
<hadoop-version>2.2.0</hadoop-version>
- <jetty-version>8.1.14.v20131031</jetty-version>
+ <jetty-version>8.1.18.v20150929</jetty-version>
<surefire-version>2.16</surefire-version>
<failsafe-version>2.19.1</failsafe-version>
</properties>
@@ -670,10 +670,18 @@
<version>${jetty-version}</version>
</dependency>
<dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>javax.servlet-api</artifactId>
+ <version>3.0.1</version>
+ </dependency>
+
+ <!--
+ <dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
<version>3.0.0.v201112011016</version>
</dependency>
+ -->
<!--
<dependency>
<groupId>org.eclipse.jetty</groupId>
@@ -719,6 +727,11 @@
<artifactId>httpclient</artifactId>
<version>4.3.6</version>
</dependency>
+ <dependency>
+ <groupId>joda-time</groupId>
+ <artifactId>joda-time</artifactId>
+ <version>2.9.2</version>
+ </dependency>
<!--
<dependency>
@@ -946,22 +959,22 @@
<dependency>
<groupId>org.jboss.shrinkwrap</groupId>
<artifactId>shrinkwrap-api</artifactId>
- <version>1.0.1</version>
+ <version>1.2.3</version>
</dependency>
<dependency>
<groupId>org.jboss.shrinkwrap</groupId>
<artifactId>shrinkwrap-impl-base</artifactId>
- <version>1.0.1</version>
+ <version>1.2.3</version>
</dependency>
<dependency>
<groupId>org.jboss.shrinkwrap.descriptors</groupId>
<artifactId>shrinkwrap-descriptors-api-javaee</artifactId>
- <version>2.0.0-alpha-4</version>
+ <version>2.0.0-alpha-8</version>
</dependency>
<dependency>
<groupId>org.jboss.shrinkwrap.descriptors</groupId>
<artifactId>shrinkwrap-descriptors-impl-javaee</artifactId>
- <version>2.0.0-alpha-4</version>
+ <version>2.0.0-alpha-8</version>
</dependency>
<dependency>
<groupId>org.apache.directory.server</groupId>
@@ -1037,6 +1050,11 @@
<artifactId>gateway-demo-ldap-launcher</artifactId>
<version>${gateway-version}</version>
</dependency>
+ <dependency>
+ <groupId>net.lingala.zip4j</groupId>
+ <artifactId>zip4j</artifactId>
+ <version>1.3.2</version>
+ </dependency>
<!-- ********** ********** ********** ********** ********** ********** -->
<!-- ********** Test Dependencies ********** -->
@@ -1135,6 +1153,7 @@
<scope>test</scope>
</dependency>
+
</dependencies>
</dependencyManagement>
[4/5] knox git commit: [KNOX-670] - Knox should be able to sost
simple web apps
Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
index fd28b8e..4815595 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentFactory.java
@@ -17,49 +17,56 @@
*/
package org.apache.hadoop.gateway.deploy;
-import org.apache.hadoop.gateway.GatewayForwardingServlet;
+import java.beans.Statement;
+import java.io.File;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.TreeMap;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Marshaller;
+
import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.GatewayServlet;
import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.deploy.impl.ApplicationDeploymentContributor;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptor;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptorFactory;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.topology.Version;
import org.apache.hadoop.gateway.util.ServiceDefinitionsLoader;
+import org.apache.hadoop.gateway.util.Urls;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.Asset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
+import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jboss.shrinkwrap.descriptor.api.Descriptors;
import org.jboss.shrinkwrap.descriptor.api.webapp30.WebAppDescriptor;
+import org.jboss.shrinkwrap.descriptor.api.webcommon30.FilterType;
import org.jboss.shrinkwrap.descriptor.api.webcommon30.ServletType;
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.JAXBException;
-import javax.xml.bind.Marshaller;
-import java.beans.Statement;
-import java.io.File;
-import java.io.IOException;
-import java.io.StringWriter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.ServiceLoader;
-import java.util.Set;
-import java.util.TreeMap;
-
public abstract class DeploymentFactory {
- private static final String DEFAULT_APP_REDIRECT_CONTEXT_PATH = "redirectTo";
+ private static final String SERVLET_NAME_SUFFIX = "-knox-gateway-servlet";
+ private static final String FILTER_NAME_SUFFIX = "-knox-gateway-filter";
private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
private static GatewayServices gatewayServices = null;
@@ -78,30 +85,120 @@ public abstract class DeploymentFactory {
DeploymentFactory.gatewayServices = services;
}
- public static WebArchive createDeployment( GatewayConfig config, Topology topology ) {
- DeploymentContext context = null;
- //TODO move the loading of service defs
- String stacks = config.getGatewayServicesDir();
- log.usingServicesDirectory(stacks);
- File stacksDir = new File(stacks);
- Set<ServiceDeploymentContributor> deploymentContributors = ServiceDefinitionsLoader.loadServiceDefinitions(stacksDir);
- addServiceDeploymentContributors(deploymentContributors.iterator());
+ static List<Application> findApplicationsByUrl( Topology topology, String url ) {
+ List<Application> foundApps = new ArrayList<Application>();
+ if( topology != null ) {
+ url = Urls.trimLeadingAndTrailingSlash( url );
+ Collection<Application> searchApps = topology.getApplications();
+ if( searchApps != null ) {
+ for( Application searchApp : searchApps ) {
+ List<String> searchUrls = searchApp.getUrls();
+ if( searchUrls == null || searchUrls.isEmpty() ) {
+ searchUrls = new ArrayList<String>(1);
+ searchUrls.add( searchApp.getName() );
+ }
+ for( String searchUrl : searchUrls ) {
+ if( url.equalsIgnoreCase( Urls.trimLeadingAndTrailingSlash( searchUrl ) ) ) {
+ foundApps.add( searchApp );
+ break;
+ }
+ }
+ }
+ }
+ }
+ return foundApps;
+ }
+
+ // Verify that there are no two apps with duplicate urls.
+ static void validateNoAppsWithDuplicateUrlsInTopology( Topology topology ) {
+ if( topology != null ) {
+ Collection<Application> apps = topology.getApplications();
+ if( apps != null ) {
+ for( Application app : apps ) {
+ List<String> urls = app.getUrls();
+ if( urls == null || urls.isEmpty() ) {
+ urls = new ArrayList<String>(1);
+ urls.add( app.getName() );
+ }
+ for( String url : urls ) {
+ List<Application> dups = findApplicationsByUrl( topology, url );
+ if( dups != null ) {
+ for( Application dup : dups ) {
+ if( dup != app ) {
+ throw new DeploymentException( "Topology " + topology.getName() + " contains applications " + app.getName() + " and " + dup.getName() + " with the same url: " + url );
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ // Verify that if there are services that there are no applications with a root url.
+ static void validateNoAppsWithRootUrlsInServicesTopology( Topology topology ) {
+ if( topology != null ) {
+ if( topology.getServices() != null && !topology.getServices().isEmpty() ) {
+ List<Application> dups = findApplicationsByUrl( topology, "/" );
+ if( dups != null && !dups.isEmpty() ) {
+ throw new DeploymentException( "Topology " + topology.getName() + " contains both services and an application " + dups.get( 0 ).getName() + " with a root url." );
+ }
+ }
+ }
+ }
+
+ static void validateTopology( Topology topology ) {
+ validateNoAppsWithRootUrlsInServicesTopology( topology );
+ validateNoAppsWithDuplicateUrlsInTopology( topology );
+ }
+
+ public static EnterpriseArchive createDeployment( GatewayConfig config, Topology topology ) {
+ validateTopology( topology );
+ loadStacksServiceContributors( config );
Map<String,List<ProviderDeploymentContributor>> providers = selectContextProviders( topology );
Map<String,List<ServiceDeploymentContributor>> services = selectContextServices( topology );
- context = createDeploymentContext( config, topology.getName(), topology, providers, services );
- initialize( context, providers, services );
- contribute( context, providers, services );
- finalize( context, providers, services );
- if (topology.getName().equals("_default")) {
- // if this is the default topology then add the forwarding webapp as well
- context = deployDefaultTopology(config, topology);
+ Map<String,ServiceDeploymentContributor> applications = selectContextApplications( config, topology );
+ EnterpriseArchive ear = ShrinkWrap.create( EnterpriseArchive.class, topology.getName() );
+ ear.addAsResource( toStringAsset( topology ), "topology.xml" );
+ if( !services.isEmpty() ) {
+ WebArchive war = createServicesDeployment( config, topology, providers, services );
+ ear.addAsModule( war );
}
- storeTopology( context, topology );
+ if( !applications.isEmpty() ) {
+ for( Map.Entry<String, ServiceDeploymentContributor> application : applications.entrySet() ) {
+ WebArchive war = createApplicationDeployment( config, topology, providers, application );
+ ear.addAsModule( war );
+ }
+ }
+ return ear;
+ }
+
+ private static WebArchive createServicesDeployment(
+ GatewayConfig config,
+ Topology topology,
+ Map<String,List<ProviderDeploymentContributor>> providers,
+ Map<String,List<ServiceDeploymentContributor>> services ) {
+ DeploymentContext context = createDeploymentContext( config, "/", topology, providers );
+ initialize( context, providers, services, null );
+ contribute( context, providers, services, null );
+ finalize( context, providers, services, null );
return context.getWebArchive();
}
- private static void storeTopology( DeploymentContext context, Topology topology ) {
+ public static WebArchive createApplicationDeployment(
+ GatewayConfig config,
+ Topology topology,
+ Map<String,List<ProviderDeploymentContributor>> providers,
+ Map.Entry<String,ServiceDeploymentContributor> application ) {
+ String appPath = "/" + Urls.trimLeadingAndTrailingSlash( application.getKey() );
+ DeploymentContext context = createDeploymentContext( config, appPath, topology, providers );
+ initialize( context, providers, null, application );
+ contribute( context, providers, null, application );
+ finalize( context, providers, null, application );
+ return context.getWebArchive();
+ }
+
+ private static Asset toStringAsset( Topology topology ) {
StringWriter writer = new StringWriter();
String xml;
try {
@@ -120,41 +217,20 @@ public abstract class DeploymentFactory {
throw new DeploymentException( "Failed to marshall topology.", e );
}
StringAsset asset = new StringAsset( xml );
- context.getWebArchive().addAsWebInfResource( asset, "topology.xml" );
- }
-
- private static DeploymentContext deployDefaultTopology(GatewayConfig config,
- Topology topology) {
- // this is the "default" topology which does some specialized
- // redirects for compatibility with hadoop cli java client use
- // we do not want the various listeners and providers added or
- // the usual gateway.xml, etc.
- DeploymentContext context;
- Map<String,List<ProviderDeploymentContributor>> providers = new HashMap<String,List<ProviderDeploymentContributor>>();
- Map<String,List<ServiceDeploymentContributor>> services = new HashMap<String,List<ServiceDeploymentContributor>>();
- context = createDeploymentContext( config, "forward", topology, providers, services);
- WebAppDescriptor wad = context.getWebAppDescriptor();
- String servletName = context.getTopology().getName();
- String servletClass = GatewayForwardingServlet.class.getName();
- wad.createServlet().servletName( servletName ).servletClass( servletClass );
- wad.createServletMapping().servletName( servletName ).urlPattern( "/*" );
- ServletType<WebAppDescriptor> servlet = findServlet( context, context.getTopology().getName() );
- servlet.createInitParam()
- .paramName( DEFAULT_APP_REDIRECT_CONTEXT_PATH )
- .paramValue( config.getDefaultAppRedirectPath() );
- writeDeploymentDescriptor(context);
- return context;
+ return asset;
}
private static DeploymentContext createDeploymentContext(
- GatewayConfig config, String archiveName, Topology topology,
- Map<String,List<ProviderDeploymentContributor>> providers,
- Map<String,List<ServiceDeploymentContributor>> services ) {
- WebArchive webArchive = ShrinkWrap.create( WebArchive.class, archiveName );
+ GatewayConfig config,
+ String archivePath,
+ Topology topology,
+ Map<String,List<ProviderDeploymentContributor>> providers ) {
+ archivePath = Urls.encode( archivePath );
+ WebArchive webArchive = ShrinkWrap.create( WebArchive.class, archivePath );
WebAppDescriptor webAppDesc = Descriptors.create( WebAppDescriptor.class );
GatewayDescriptor gateway = GatewayDescriptorFactory.create();
DeploymentContext context = new DeploymentContextImpl(
- config, topology, gateway, webArchive, webAppDesc, providers, services );
+ config, topology, gateway, webArchive, webAppDesc, providers );
return context;
}
@@ -235,41 +311,108 @@ public abstract class DeploymentFactory {
return defaults;
}
+ private static Map<String,ServiceDeploymentContributor> selectContextApplications(
+ GatewayConfig config, Topology topology ) {
+ Map<String,ServiceDeploymentContributor> contributors = new HashMap<>();
+ if( topology != null ) {
+ for( Application application : topology.getApplications() ) {
+ String name = application.getName();
+ if( name == null || name.isEmpty() ) {
+ throw new DeploymentException( "Topologies cannot contain an application without a name." );
+ }
+ ApplicationDeploymentContributor contributor = new ApplicationDeploymentContributor( config, application );
+ List<String> urls = application.getUrls();
+ if( urls == null || urls.isEmpty() ) {
+ urls = new ArrayList<String>( 1 );
+ urls.add( "/" + name );
+ }
+ for( String url : urls ) {
+ if( url == null || url.isEmpty() || url.equals( "/" ) ) {
+ if( !topology.getServices().isEmpty() ) {
+ throw new DeploymentException( String.format(
+ "Topologies with services cannot contain an application (%s) with a root url.", name ) );
+ }
+ }
+ contributors.put( url, contributor );
+ }
+ }
+ }
+ return contributors;
+ }
+
private static void initialize(
DeploymentContext context,
Map<String,List<ProviderDeploymentContributor>> providers,
- Map<String,List<ServiceDeploymentContributor>> services ) {
+ Map<String,List<ServiceDeploymentContributor>> services,
+ Map.Entry<String,ServiceDeploymentContributor> applications ) {
WebAppDescriptor wad = context.getWebAppDescriptor();
- String servletName = context.getTopology().getName();
- String servletClass = GatewayServlet.class.getName();
- wad.createServlet().servletName( servletName ).servletClass( servletClass );
- wad.createServletMapping().servletName( servletName ).urlPattern( "/*" );
+ String topoName = context.getTopology().getName();
+ if( applications == null ) {
+ String servletName = topoName + SERVLET_NAME_SUFFIX;
+ wad.createServlet().servletName( servletName ).servletClass( GatewayServlet.class.getName() );
+ wad.createServletMapping().servletName( servletName ).urlPattern( "/*" );
+ } else {
+ String filterName = topoName + FILTER_NAME_SUFFIX;
+ wad.createFilter().filterName( filterName ).filterClass( GatewayServlet.class.getName() );
+ wad.createFilterMapping().filterName( filterName ).urlPattern( "/*" );
+ }
if (gatewayServices != null) {
gatewayServices.initializeContribution(context);
} else {
log.gatewayServicesNotInitialized();
}
- for( String role : providers.keySet() ) {
- for( ProviderDeploymentContributor contributor : providers.get( role ) ) {
- try {
- injectServices(contributor);
- log.initializeProvider( contributor.getName(), contributor.getRole() );
- contributor.initializeContribution( context );
- } catch( Exception e ) {
- log.failedToInitializeContribution( e );
- throw new DeploymentException("Failed to initialize contribution.", e);
+ initializeProviders( context, providers );
+ initializeServices( context, services );
+ initializeApplications( context, applications );
+ }
+
+ private static void initializeProviders(
+ DeploymentContext context,
+ Map<String,List<ProviderDeploymentContributor>> providers ) {
+ if( providers != null ) {
+ for( String role : providers.keySet() ) {
+ for( ProviderDeploymentContributor contributor : providers.get( role ) ) {
+ try {
+ injectServices( contributor );
+ log.initializeProvider( contributor.getName(), contributor.getRole() );
+ contributor.initializeContribution( context );
+ } catch( Exception e ) {
+ log.failedToInitializeContribution( e );
+ throw new DeploymentException( "Failed to initialize contribution.", e );
+ }
}
}
}
- for( String role : services.keySet() ) {
- for( ServiceDeploymentContributor contributor : services.get( role ) ) {
+ }
+
+ private static void initializeServices( DeploymentContext context, Map<String, List<ServiceDeploymentContributor>> services ) {
+ if( services != null ) {
+ for( String role : services.keySet() ) {
+ for( ServiceDeploymentContributor contributor : services.get( role ) ) {
+ try {
+ injectServices( contributor );
+ log.initializeService( contributor.getName(), contributor.getRole() );
+ contributor.initializeContribution( context );
+ } catch( Exception e ) {
+ log.failedToInitializeContribution( e );
+ throw new DeploymentException( "Failed to initialize contribution.", e );
+ }
+ }
+ }
+ }
+ }
+
+ private static void initializeApplications( DeploymentContext context, Map.Entry<String, ServiceDeploymentContributor> application ) {
+ if( application != null ) {
+ ServiceDeploymentContributor contributor = application.getValue();
+ if( contributor != null ) {
try {
- injectServices(contributor);
- log.initializeService( contributor.getName(), contributor.getRole() );
+ injectServices( contributor );
+ log.initializeApplication( contributor.getName() );
contributor.initializeContribution( context );
} catch( Exception e ) {
log.failedToInitializeContribution( e );
- throw new DeploymentException("Failed to initialize contribution.", e);
+ throw new DeploymentException( "Failed to initialize application contribution.", e );
}
}
}
@@ -300,8 +443,15 @@ public abstract class DeploymentFactory {
private static void contribute(
DeploymentContext context,
Map<String,List<ProviderDeploymentContributor>> providers,
- Map<String,List<ServiceDeploymentContributor>> services ) {
- Topology topology = context.getTopology();
+ Map<String,List<ServiceDeploymentContributor>> services,
+ Map.Entry<String,ServiceDeploymentContributor> applications ) {
+ Topology topology = context.getTopology();
+ contributeProviders( context, topology, providers );
+ contributeServices( context, topology, services );
+ contributeApplications( context, topology, applications );
+ }
+
+ private static void contributeProviders( DeploymentContext context, Topology topology, Map<String, List<ProviderDeploymentContributor>> providers ) {
for( Provider provider : topology.getProviders() ) {
ProviderDeploymentContributor contributor = getProviderContributor( providers, provider.getRole(), provider.getName() );
if( contributor != null && provider.isEnabled() ) {
@@ -315,23 +465,44 @@ public abstract class DeploymentFactory {
}
}
}
- for( Service service : topology.getServices() ) {
- ServiceDeploymentContributor contributor = getServiceContributor( service.getRole(), service.getName(), service.getVersion() );
- if( contributor != null ) {
- try {
- log.contributeService( service.getName(), service.getRole() );
- contributor.contributeService( context, service );
- if (gatewayServices != null) {
- ServiceRegistry sr = gatewayServices.getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
- if (sr != null) {
- String regCode = sr.getRegistrationCode(topology.getName());
- sr.registerService(regCode, topology.getName(), service.getRole(), service.getUrls() );
+ }
+
+ private static void contributeServices( DeploymentContext context, Topology topology, Map<String, List<ServiceDeploymentContributor>> services ) {
+ if( services != null ) {
+ for( Service service : topology.getServices() ) {
+ ServiceDeploymentContributor contributor = getServiceContributor( service.getRole(), service.getName(), service.getVersion() );
+ if( contributor != null ) {
+ try {
+ log.contributeService( service.getName(), service.getRole() );
+ contributor.contributeService( context, service );
+ if( gatewayServices != null ) {
+ ServiceRegistry sr = gatewayServices.getService( GatewayServices.SERVICE_REGISTRY_SERVICE );
+ if( sr != null ) {
+ String regCode = sr.getRegistrationCode( topology.getName() );
+ sr.registerService( regCode, topology.getName(), service.getRole(), service.getUrls() );
+ }
}
+ } catch( Exception e ) {
+ // Maybe it makes sense to throw exception
+ log.failedToContributeService( service.getName(), service.getRole(), e );
+ throw new DeploymentException( "Failed to contribute service.", e );
}
+ }
+ }
+ }
+ }
+
+ private static void contributeApplications( DeploymentContext context, Topology topology, Map.Entry<String, ServiceDeploymentContributor> applications ) {
+ if( applications != null ) {
+ ServiceDeploymentContributor contributor = applications.getValue();
+ if( contributor != null ) {
+ try {
+ log.contributeApplication( contributor.getName() );
+ Application applicationDesc = topology.getApplication( applications.getKey() );
+ contributor.contributeService( context, applicationDesc );
} catch( Exception e ) {
- // Maybe it makes sense to throw exception
- log.failedToContributeService( service.getName(), service.getRole(), e );
- throw new DeploymentException("Failed to contribute service.", e);
+ log.failedToInitializeContribution( e );
+ throw new DeploymentException( "Failed to contribution application.", e );
}
}
}
@@ -374,7 +545,8 @@ public abstract class DeploymentFactory {
private static void finalize(
DeploymentContext context,
Map<String,List<ProviderDeploymentContributor>> providers,
- Map<String,List<ServiceDeploymentContributor>> services ) {
+ Map<String,List<ServiceDeploymentContributor>> services,
+ Map.Entry<String,ServiceDeploymentContributor> application ) {
try {
// Write the gateway descriptor (gateway.xml) into the war.
StringWriter writer = new StringWriter();
@@ -384,14 +556,33 @@ public abstract class DeploymentFactory {
GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
// Set the location of the gateway descriptor as a servlet init param.
- ServletType<WebAppDescriptor> servlet = findServlet( context, context.getTopology().getName() );
- servlet.createInitParam()
- .paramName( GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_PARAM )
- .paramValue( GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
-
+ if( application == null ) {
+ String servletName = context.getTopology().getName() + SERVLET_NAME_SUFFIX;
+ ServletType<WebAppDescriptor> servlet = findServlet( context, servletName );
+ servlet.createInitParam()
+ .paramName( GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_PARAM )
+ .paramValue( "/WEB-INF/" + GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
+ } else {
+ String servletName = context.getTopology().getName() + FILTER_NAME_SUFFIX;
+ FilterType<WebAppDescriptor> filter = findFilter( context, servletName );
+ filter.createInitParam()
+ .paramName( GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_PARAM )
+ .paramValue( "/WEB-INF/" + GatewayServlet.GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
+ }
if (gatewayServices != null) {
gatewayServices.finalizeContribution(context);
}
+ finalizeProviders( context, providers );
+ finalizeServices( context, services );
+ finalizeApplications( context, application );
+ writeDeploymentDescriptor( context, application != null );
+ } catch ( IOException e ) {
+ throw new RuntimeException( e );
+ }
+ }
+
+ private static void finalizeProviders( DeploymentContext context, Map<String, List<ProviderDeploymentContributor>> providers ) {
+ if( providers != null ) {
for( String role : providers.keySet() ) {
for( ProviderDeploymentContributor contributor : providers.get( role ) ) {
try {
@@ -400,10 +591,15 @@ public abstract class DeploymentFactory {
} catch( Exception e ) {
// Maybe it makes sense to throw exception
log.failedToFinalizeContribution( e );
- throw new DeploymentException("Failed to finalize contribution.", e);
+ throw new DeploymentException( "Failed to finalize contribution.", e );
}
}
}
+ }
+ }
+
+ private static void finalizeServices( DeploymentContext context, Map<String, List<ServiceDeploymentContributor>> services ) {
+ if( services != null ) {
for( String role : services.keySet() ) {
for( ServiceDeploymentContributor contributor : services.get( role ) ) {
try {
@@ -412,22 +608,36 @@ public abstract class DeploymentFactory {
} catch( Exception e ) {
// Maybe it makes sense to throw exception
log.failedToFinalizeContribution( e );
- throw new DeploymentException("Failed to finalize contribution.", e);
+ throw new DeploymentException( "Failed to finalize contribution.", e );
}
}
}
+ }
+ }
- writeDeploymentDescriptor(context);
-
- } catch ( IOException e ) {
- throw new RuntimeException( e );
+ private static void finalizeApplications( DeploymentContext context, Map.Entry<String, ServiceDeploymentContributor> application ) {
+ if( application != null ) {
+ ServiceDeploymentContributor contributor = application.getValue();
+ if( contributor != null ) {
+ try {
+ log.finalizeApplication( contributor.getName() );
+ contributor.finalizeContribution( context );
+ } catch( Exception e ) {
+ log.failedToInitializeContribution( e );
+ throw new DeploymentException( "Failed to contribution application.", e );
+ }
+ }
}
}
- private static void writeDeploymentDescriptor(DeploymentContext context) {
+ private static void writeDeploymentDescriptor( DeploymentContext context, boolean override ) {
// Write the web.xml into the war.
Asset webXmlAsset = new StringAsset( context.getWebAppDescriptor().exportAsString() );
- context.getWebArchive().setWebXML( webXmlAsset );
+ if( override ) {
+ context.getWebArchive().addAsWebInfResource( webXmlAsset, "override-web.xml" );
+ } else {
+ context.getWebArchive().setWebXML( webXmlAsset );
+ }
}
public static ServletType<WebAppDescriptor> findServlet( DeploymentContext context, String name ) {
@@ -440,6 +650,24 @@ public abstract class DeploymentFactory {
return null;
}
+ public static FilterType<WebAppDescriptor> findFilter( DeploymentContext context, String name ) {
+ List<FilterType<WebAppDescriptor>> filters = context.getWebAppDescriptor().getAllFilter();
+ for( FilterType<WebAppDescriptor> filter : filters ) {
+ if( name.equals( filter.getFilterName() ) ) {
+ return filter;
+ }
+ }
+ return null;
+ }
+
+ private static void loadStacksServiceContributors( GatewayConfig config ) {
+ String stacks = config.getGatewayServicesDir();
+ log.usingServicesDirectory(stacks);
+ File stacksDir = new File(stacks);
+ Set<ServiceDeploymentContributor> deploymentContributors = ServiceDefinitionsLoader.loadServiceDefinitions(stacksDir);
+ addServiceDeploymentContributors(deploymentContributors.iterator());
+ }
+
private static void loadServiceContributors() {
SERVICE_CONTRIBUTOR_MAP = new HashMap<String, Map<String, Map<Version, ServiceDeploymentContributor>>>();
ServiceLoader<ServiceDeploymentContributor> loader = ServiceLoader.load( ServiceDeploymentContributor.class );
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ApplicationDeploymentContributor.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ApplicationDeploymentContributor.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ApplicationDeploymentContributor.java
new file mode 100644
index 0000000..3f68ede
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/impl/ApplicationDeploymentContributor.java
@@ -0,0 +1,214 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.deploy.impl;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.StringReader;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.JAXBException;
+import javax.xml.bind.Unmarshaller;
+
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.deploy.DeploymentContext;
+import org.apache.hadoop.gateway.deploy.DeploymentException;
+import org.apache.hadoop.gateway.deploy.ServiceDeploymentContributorBase;
+import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
+import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.filter.XForwardedHeaderFilter;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptor;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteRulesDescriptorFactory;
+import org.apache.hadoop.gateway.service.definition.Policy;
+import org.apache.hadoop.gateway.service.definition.Rewrite;
+import org.apache.hadoop.gateway.service.definition.Route;
+import org.apache.hadoop.gateway.service.definition.ServiceDefinition;
+import org.apache.hadoop.gateway.topology.Application;
+import org.apache.hadoop.gateway.topology.Service;
+import org.apache.hadoop.gateway.topology.Version;
+
+public class ApplicationDeploymentContributor extends ServiceDeploymentContributorBase {
+
+ private static final String SERVICE_DEFINITION_FILE_NAME = "service.xml";
+ private static final String REWRITE_RULES_FILE_NAME = "rewrite.xml";
+ private static final String XFORWARDED_FILTER_NAME = "XForwardedHeaderFilter";
+ private static final String XFORWARDED_FILTER_ROLE = "xforwardedheaders";
+
+ private ServiceDefinition serviceDefinition;
+
+ private UrlRewriteRulesDescriptor serviceRules;
+
+ private static ServiceDefinition loadServiceDefinition( Application application, File file ) throws JAXBException, FileNotFoundException {
+ ServiceDefinition definition;
+ if( !file.exists() ) {
+ definition = new ServiceDefinition();
+ definition.setName( application.getName() );
+ List<Route> routes = new ArrayList<Route>(1);
+ Route route;
+ route = new Route();
+ route.setPath( "/?**" );
+ routes.add( route );
+ route = new Route();
+ route.setPath( "/**?**" );
+ routes.add( route );
+ definition.setRoutes( routes );
+ } else {
+ JAXBContext context = JAXBContext.newInstance( ServiceDefinition.class );
+ Unmarshaller unmarshaller = context.createUnmarshaller();
+ FileInputStream inputStream = new FileInputStream( file );
+ definition = (ServiceDefinition) unmarshaller.unmarshal( inputStream );
+ }
+ return definition;
+ }
+
+ private static UrlRewriteRulesDescriptor loadRewriteRules( Application application, File file ) throws IOException {
+ UrlRewriteRulesDescriptor rules;
+ if( !file.exists() ) {
+ rules = UrlRewriteRulesDescriptorFactory.load( "xml", new StringReader( "<rules/>" ) );
+ } else {
+ FileReader reader = new FileReader( file );
+ rules = UrlRewriteRulesDescriptorFactory.load( "xml", reader );
+ reader.close();
+ }
+ return rules;
+ }
+
+ public ApplicationDeploymentContributor( GatewayConfig config, Application application ) throws DeploymentException {
+ try {
+ File appsDir = new File( config.getGatewayApplicationsDir() );
+ File appDir = new File( appsDir, application.getName() );
+ File serviceFile = new File( appDir, SERVICE_DEFINITION_FILE_NAME );
+ File rewriteFile = new File( appDir, REWRITE_RULES_FILE_NAME );
+ serviceDefinition = loadServiceDefinition( application, serviceFile );
+ serviceRules = loadRewriteRules( application, rewriteFile );
+ } catch ( IOException e ) {
+ throw new DeploymentException( "Failed to deploy application: " + application.getName(), e );
+ } catch ( JAXBException e ){
+ throw new DeploymentException( "Failed to deploy application: " + application.getName(), e );
+ }
+ }
+
+ @Override
+ public String getRole() {
+ return serviceDefinition.getRole();
+ }
+
+ @Override
+ public String getName() {
+ return serviceDefinition.getName();
+ }
+
+ @Override
+ public Version getVersion() {
+ return new Version(serviceDefinition.getVersion());
+ }
+
+ @Override
+ public void contributeService(DeploymentContext context, Service service) throws Exception {
+ contributeRewriteRules(context, service);
+ contributeResources(context, service);
+ }
+
+ private void contributeRewriteRules(DeploymentContext context, Service service) {
+ if ( serviceRules != null ) {
+ UrlRewriteRulesDescriptor clusterRules = context.getDescriptor("rewrite");
+ clusterRules.addRules(serviceRules);
+ }
+ }
+
+ private void contributeResources(DeploymentContext context, Service service) {
+ Map<String, String> filterParams = new HashMap<String, String>();
+ List<Route> bindings = serviceDefinition.getRoutes();
+ for ( Route binding : bindings ) {
+ List<Rewrite> filters = binding.getRewrites();
+ if ( filters != null && !filters.isEmpty() ) {
+ filterParams.clear();
+ for ( Rewrite filter : filters ) {
+ filterParams.put(filter.getTo(), filter.getApply());
+ }
+ }
+ try {
+ contributeResource(context, service, binding, filterParams);
+ } catch ( URISyntaxException e ) {
+ e.printStackTrace();
+ }
+ }
+
+ }
+
+ private void contributeResource( DeploymentContext context, Service service, Route binding, Map<String, String> filterParams) throws URISyntaxException {
+ List<FilterParamDescriptor> params = new ArrayList<FilterParamDescriptor>();
+ ResourceDescriptor resource = context.getGatewayDescriptor().addResource();
+ resource.role(service.getRole());
+ resource.pattern(binding.getPath());
+ //add x-forwarded filter if enabled in config
+ if (context.getGatewayConfig().isXForwardedEnabled()) {
+ resource.addFilter().name(XFORWARDED_FILTER_NAME).role(XFORWARDED_FILTER_ROLE).impl(XForwardedHeaderFilter.class);
+ }
+ List<Policy> policyBindings = binding.getPolicies();
+ if ( policyBindings == null ) {
+ policyBindings = serviceDefinition.getPolicies();
+ }
+ if ( policyBindings == null ) {
+ //add default set
+ addDefaultPolicies(context, service, filterParams, params, resource);
+ } else {
+ addPolicies(context, service, filterParams, params, resource, policyBindings);
+ }
+ }
+
+ private void addPolicies( DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource, List<Policy> policyBindings) throws URISyntaxException {
+ for ( Policy policyBinding : policyBindings ) {
+ String role = policyBinding.getRole();
+ if ( role == null ) {
+ throw new IllegalArgumentException("Policy defined has no role for service " + service.getName());
+ }
+ role = role.trim().toLowerCase();
+ if ( role.equals("rewrite") ) {
+ addRewriteFilter(context, service, filterParams, params, resource);
+ } else if ( topologyContainsProviderType(context, role) ) {
+ context.contributeFilter(service, resource, role, policyBinding.getName(), null);
+ }
+ }
+ }
+
+ private void addDefaultPolicies( DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource) throws URISyntaxException {
+ addWebAppSecFilters(context, service, resource);
+ addAuthenticationFilter(context, service, resource);
+ addRewriteFilter(context, service, filterParams, params, resource);
+ addIdentityAssertionFilter(context, service, resource);
+ addAuthorizationFilter(context, service, resource);
+ }
+
+ private void addRewriteFilter( DeploymentContext context, Service service, Map<String, String> filterParams, List<FilterParamDescriptor> params, ResourceDescriptor resource) throws URISyntaxException {
+ if ( !filterParams.isEmpty() ) {
+ for ( Map.Entry<String, String> filterParam : filterParams.entrySet() ) {
+ params.add(resource.createFilterParam().name(filterParam.getKey()).value(filterParam.getValue()));
+ }
+ }
+ addRewriteFilter(context, service, resource, params);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/descriptor/xml/XmlGatewayDescriptorExporter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/descriptor/xml/XmlGatewayDescriptorExporter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/descriptor/xml/XmlGatewayDescriptorExporter.java
index d9151a3..207fd54 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/descriptor/xml/XmlGatewayDescriptorExporter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/descriptor/xml/XmlGatewayDescriptorExporter.java
@@ -17,26 +17,20 @@
*/
package org.apache.hadoop.gateway.descriptor.xml;
+import java.io.IOException;
+import java.io.Writer;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
import org.apache.hadoop.gateway.descriptor.FilterDescriptor;
import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptor;
import org.apache.hadoop.gateway.descriptor.GatewayDescriptorExporter;
import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
+import org.apache.hadoop.gateway.util.XmlUtils;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import java.io.IOException;
-import java.io.Writer;
-
public class XmlGatewayDescriptorExporter implements GatewayDescriptorExporter, XmlGatewayDescriptorTags {
@Override
@@ -47,10 +41,7 @@ public class XmlGatewayDescriptorExporter implements GatewayDescriptorExporter,
@Override
public void store( GatewayDescriptor descriptor, Writer writer ) throws IOException {
try {
- DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
- DocumentBuilder builder = builderFactory.newDocumentBuilder();
- Document document = builder.newDocument();
- document.setXmlStandalone( true );
+ Document document = XmlUtils.createDocument();
Element gateway = document.createElement( GATEWAY );
document.appendChild( gateway );
@@ -59,16 +50,7 @@ public class XmlGatewayDescriptorExporter implements GatewayDescriptorExporter,
gateway.appendChild( createResource( document, resource ) );
}
- TransformerFactory transformerFactory = TransformerFactory.newInstance();
- transformerFactory.setAttribute( "indent-number", 2 );
- Transformer transformer = transformerFactory.newTransformer();
- //transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" );
- transformer.setOutputProperty( OutputKeys.STANDALONE, "yes" );
- transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
-
- StreamResult result = new StreamResult( writer );
- DOMSource source = new DOMSource(document);
- transformer.transform( source, result );
+ XmlUtils.writeXml( document, writer );
} catch( ParserConfigurationException e ) {
throw new IOException( e );
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/DefaultTopologyHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/DefaultTopologyHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/DefaultTopologyHandler.java
new file mode 100644
index 0000000..75a2aed
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/DefaultTopologyHandler.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.filter;
+
+import java.io.IOException;
+import java.util.Collection;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.gateway.GatewayMessages;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.topology.TopologyService;
+import org.apache.hadoop.gateway.topology.Topology;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.handler.HandlerWrapper;
+
+public class DefaultTopologyHandler extends HandlerWrapper {
+
+ private static GatewayMessages LOG = MessagesFactory.get(GatewayMessages.class);
+
+ private GatewayConfig config;
+ private GatewayServices services;
+ private String staticRedirectContext = null;
+
+ public DefaultTopologyHandler( GatewayConfig config, GatewayServices services, Handler delegate ) {
+ if( config == null ) {
+ throw new IllegalArgumentException( "config==null" );
+ }
+ if( services == null ) {
+ throw new IllegalArgumentException( "services==null" );
+ }
+ this.config = config;
+ this.services = services;
+ String defaultTopologyName = config.getDefaultTopologyName();
+ if( defaultTopologyName != null ) {
+ staticRedirectContext = config.getDefaultAppRedirectPath();
+ if( staticRedirectContext != null && staticRedirectContext.trim().isEmpty() ) {
+ staticRedirectContext = null;
+ }
+ }
+ if( staticRedirectContext != null ) {
+ LOG.defaultTopologySetup( defaultTopologyName, staticRedirectContext );
+ }
+ setHandler( delegate );
+ }
+
+ @Override
+ public void handle( String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response ) throws IOException, ServletException {
+ if( !baseRequest.isHandled() ) {
+ String redirectContext = staticRedirectContext;
+ if( redirectContext == null ) {
+ TopologyService topologies = services.getService( GatewayServices.TOPOLOGY_SERVICE );
+ if( topologies != null ) {
+ Collection<Topology> candidates = topologies.getTopologies();
+ if( candidates != null && candidates.size() == 1 ) {
+ Topology topology = candidates.iterator().next();
+ redirectContext = "/" + config.getGatewayPath() + "/" + topology.getName();
+ }
+ }
+ }
+ if( redirectContext != null ) {
+ String newTarget = redirectContext + target;
+ ForwardedRequest newRequest = new ForwardedRequest( request, newTarget );
+ LOG.defaultTopologyForward( target, newTarget );
+ super.handle( newTarget, baseRequest, newRequest, response );
+ }
+ }
+ }
+
+ private static class ForwardedRequest extends HttpServletRequestWrapper {
+
+ private String contextPath;
+
+ public ForwardedRequest( HttpServletRequest request, String contextPath ) {
+ super( request );
+ this.contextPath = contextPath;
+ }
+
+ public String getContextPath() {
+ return contextPath;
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/GatewayHelloFilter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/GatewayHelloFilter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/GatewayHelloFilter.java
new file mode 100644
index 0000000..ab56a34
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/filter/GatewayHelloFilter.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.filter;
+
+import java.io.IOException;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+
+public class GatewayHelloFilter implements Filter {
+
+ @Override
+ public void init( FilterConfig filterConfig ) throws ServletException {
+ System.out.println( "GatewayHelloFilter.init" );
+ }
+
+ @Override
+ public void doFilter( ServletRequest request, ServletResponse response, FilterChain chain ) throws IOException, ServletException {
+ System.out.println( "GatewayHelloFilter.doFilter" );
+ chain.doFilter( request, response );
+ }
+
+ @Override
+ public void destroy() {
+ System.out.println( "GatewayHelloFilter.destroy" );
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
index 90f7f79..b5e80d2 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/builder/BeanPropertyTopologyBuilder.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.topology.builder;
import java.util.ArrayList;
import java.util.List;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
@@ -28,10 +29,12 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
private String name;
private List<Provider> providers;
private List<Service> services;
+ private List<Application> applications;
public BeanPropertyTopologyBuilder() {
providers = new ArrayList<Provider>();
services = new ArrayList<Service>();
+ applications = new ArrayList<Application>();
}
public BeanPropertyTopologyBuilder name(String name) {
@@ -61,6 +64,15 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
return services;
}
+ public BeanPropertyTopologyBuilder addApplication( Application application ) {
+ applications.add(application);
+ return this;
+ }
+
+ public List<Application> applications() {
+ return applications;
+ }
+
public Topology build() {
Topology topology = new Topology();
topology.setName(name);
@@ -73,6 +85,10 @@ public class BeanPropertyTopologyBuilder implements TopologyBuilder {
topology.addService(service);
}
+ for (Application application : applications) {
+ topology.addApplication(application);
+ }
+
return topology;
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/validation/TopologyValidator.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/validation/TopologyValidator.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/validation/TopologyValidator.java
index e15282b..37d202f 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/validation/TopologyValidator.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/validation/TopologyValidator.java
@@ -18,23 +18,22 @@
package org.apache.hadoop.gateway.topology.validation;
-import org.apache.hadoop.gateway.config.GatewayConfig;
-import org.apache.hadoop.gateway.topology.Topology;
-import org.xml.sax.ErrorHandler;
-import org.xml.sax.SAXException;
-import org.xml.sax.SAXParseException;
-
-import javax.xml.XMLConstants;
-import javax.xml.transform.stream.StreamSource;
-import javax.xml.validation.Schema;
-import javax.xml.validation.SchemaFactory;
-import javax.xml.validation.Validator;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
+import javax.xml.XMLConstants;
+import javax.xml.transform.stream.StreamSource;
+import javax.xml.validation.Schema;
+import javax.xml.validation.SchemaFactory;
+import javax.xml.validation.Validator;
+
+import org.apache.hadoop.gateway.topology.Topology;
+import org.xml.sax.ErrorHandler;
+import org.xml.sax.SAXException;
+import org.xml.sax.SAXParseException;
public class TopologyValidator {
@@ -56,11 +55,10 @@ public class TopologyValidator {
public boolean validateTopology() {
errors = new LinkedList<String>();
try {
- File xml = new File(filePath);
-
SchemaFactory fact = SchemaFactory
.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
- Schema s = fact.newSchema( this.getClass().getClassLoader().getResource( "conf/topology-v1.xsd" ) );
+ URL schemaUrl = ClassLoader.getSystemResource( "conf/topology-v1.xsd" );
+ Schema s = fact.newSchema( schemaUrl );
Validator validator = s.newValidator();
final List<SAXParseException> exceptions = new LinkedList<>();
validator.setErrorHandler(new ErrorHandler() {
@@ -77,6 +75,7 @@ public class TopologyValidator {
}
});
+ File xml = new File(filePath);
validator.validate(new StreamSource(xml));
if(exceptions.size() > 0) {
for (SAXParseException e : exceptions) {
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
index b32f0c9..e573d63 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/xml/KnoxFormatXmlTopologyRules.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.topology.xml;
import org.apache.commons.digester3.Rule;
import org.apache.commons.digester3.binder.AbstractRulesModule;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Param;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
@@ -31,6 +32,7 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
private static final String ROOT_TAG = "topology";
private static final String NAME_TAG = "name";
private static final String VERSION_TAG = "version";
+ private static final String APPLICATION_TAG = "application";
private static final String SERVICE_TAG = "service";
private static final String ROLE_TAG = "role";
private static final String URL_TAG = "url";
@@ -47,6 +49,15 @@ public class KnoxFormatXmlTopologyRules extends AbstractRulesModule {
forPattern( ROOT_TAG + "/" + NAME_TAG ).callMethod("name").usingElementBodyAsArgument();
forPattern( ROOT_TAG + "/" + VERSION_TAG ).callMethod("version").usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG ).createObject().ofType( Application.class ).then().setNext( "addApplication" );
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + ROLE_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + VERSION_TAG ).createObject().ofType(Version.class).then().setBeanProperty().then().setNext("setVersion");
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + URL_TAG ).callMethod( "addUrl" ).usingElementBodyAsArgument();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG ).createObject().ofType( Param.class ).then().addRule( paramRule ).then().setNext( "addParam" );
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG + "/" + NAME_TAG ).setBeanProperty();
+ forPattern( ROOT_TAG + "/" + APPLICATION_TAG + "/" + PARAM_TAG + "/" + VALUE_TAG ).setBeanProperty();
+
forPattern( ROOT_TAG + "/" + SERVICE_TAG ).createObject().ofType( Service.class ).then().setNext( "addService" );
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + ROLE_TAG ).setBeanProperty();
forPattern( ROOT_TAG + "/" + SERVICE_TAG + "/" + NAME_TAG ).setBeanProperty();
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
index 780f944..c865bfb 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
@@ -17,6 +17,25 @@
*/
package org.apache.hadoop.gateway.util;
+import java.io.BufferedReader;
+import java.io.Console;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.UUID;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLException;
+
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
@@ -29,11 +48,11 @@ import org.apache.hadoop.gateway.services.CLIGatewayServices;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.Service;
import org.apache.hadoop.gateway.services.ServiceLifecycleException;
-import org.apache.hadoop.gateway.services.topology.TopologyService;
import org.apache.hadoop.gateway.services.security.AliasService;
import org.apache.hadoop.gateway.services.security.KeystoreService;
import org.apache.hadoop.gateway.services.security.KeystoreServiceException;
import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.gateway.services.topology.TopologyService;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
@@ -58,26 +77,7 @@ import org.apache.shiro.util.Factory;
import org.apache.shiro.util.ThreadContext;
import org.eclipse.persistence.oxm.MediaType;
import org.jboss.shrinkwrap.api.exporter.ExplodedExporter;
-import org.jboss.shrinkwrap.api.spec.WebArchive;
-
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLException;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.PrintStream;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.UUID;
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.io.Console;
+import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
/**
*
*/
@@ -1322,10 +1322,10 @@ public class KnoxCLI extends Configured implements Tool {
protected String getConfig(Topology t){
File tmpDir = new File(System.getProperty("java.io.tmpdir"));
DeploymentFactory.setGatewayServices(services);
- WebArchive archive = DeploymentFactory.createDeployment(getGatewayConfig(), t);
+ EnterpriseArchive archive = DeploymentFactory.createDeployment(getGatewayConfig(), t);
File war = archive.as(ExplodedExporter.class).exportExploded(tmpDir, t.getName() + "_deploy.tmp");
war.deleteOnExit();
- String config = war.getAbsolutePath() + "/WEB-INF/shiro.ini";
+ String config = war.getAbsolutePath() + "/%2F/WEB-INF/shiro.ini";
try{
FileUtils.forceDeleteOnExit(war);
} catch (IOException e) {
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
index 0f41b2b..737b405 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/ServiceDefinitionsLoader.java
@@ -114,7 +114,7 @@ public class ServiceDefinitionsLoader {
return files;
}
- private static UrlRewriteRulesDescriptor loadRewriteRules(File servicesDir) {
+ public static UrlRewriteRulesDescriptor loadRewriteRules(File servicesDir) {
File rewriteFile = new File(servicesDir, REWRITE_FILE);
if ( rewriteFile.exists() ) {
InputStream stream = null;
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/resources/conf/topology-v1.xsd
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/resources/conf/topology-v1.xsd b/gateway-server/src/main/resources/conf/topology-v1.xsd
index 8ddc7d9..992d8a7 100644
--- a/gateway-server/src/main/resources/conf/topology-v1.xsd
+++ b/gateway-server/src/main/resources/conf/topology-v1.xsd
@@ -18,9 +18,11 @@ limitations under the License.
<h:schema xmlns:h="http://www.w3.org/2001/XMLSchema">
<h:element name="topology">
<h:complexType>
- <h:choice maxOccurs="unbounded">
+ <h:sequence maxOccurs="1">
- <h:element name="gateway" maxOccurs="1">
+ <h:element name="name" minOccurs="0" maxOccurs="1"/>
+
+ <h:element name="gateway" minOccurs="0" maxOccurs="1">
<h:complexType>
<h:choice minOccurs="1" maxOccurs="unbounded">
@@ -47,24 +49,49 @@ limitations under the License.
</h:complexType>
</h:element>
-
</h:choice>
</h:complexType>
</h:element>
- <h:element name="service" maxOccurs="unbounded">
+ <h:element name="service" minOccurs="0" maxOccurs="unbounded">
<h:complexType>
<h:sequence>
<h:element name="role" type="h:string" minOccurs="1" maxOccurs="1"/>
- <h:element name="url" type="h:string" minOccurs="0"
- maxOccurs="unbounded" />
+ <h:element name="url" type="h:string" minOccurs="0" maxOccurs="unbounded" />
+ <h:element name="param" minOccurs="0" maxOccurs="unbounded">
+ <h:complexType>
+ <h:sequence minOccurs="0">
+ <h:element name="name" type="h:string" />
+ <h:element name="value" type="h:string" />
+ </h:sequence>
+ <h:attribute name="name" type="h:string" use="optional" />
+ <h:attribute name="value" type="h:string" use="optional" />
+ </h:complexType>
+ </h:element>
</h:sequence>
</h:complexType>
-
</h:element>
+ <h:element name="application" minOccurs="0" maxOccurs="unbounded">
+ <h:complexType>
+ <h:sequence>
+ <h:element name="name" type="h:string" minOccurs="1" maxOccurs="1"/>
+ <h:element name="url" type="h:string" minOccurs="0" maxOccurs="unbounded" />
+ <h:element name="param" minOccurs="0" maxOccurs="unbounded">
+ <h:complexType>
+ <h:sequence minOccurs="0">
+ <h:element name="name" type="h:string" />
+ <h:element name="value" type="h:string" />
+ </h:sequence>
+ <h:attribute name="name" type="h:string" use="optional" />
+ <h:attribute name="value" type="h:string" use="optional" />
+ </h:complexType>
+ </h:element>
+ </h:sequence>
+ </h:complexType>
+ </h:element>
- </h:choice>
+ </h:sequence>
</h:complexType>
</h:element>
</h:schema>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
index 2ba0f7a..445a119 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/GatewayGlobalConfigTest.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
+import org.apache.hadoop.test.TestUtils;
import org.hamcrest.Matchers;
import org.junit.Test;
@@ -26,6 +27,7 @@ import java.io.File;
import java.net.URL;
import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.*;
public class GatewayGlobalConfigTest {
@@ -76,16 +78,22 @@ public class GatewayGlobalConfigTest {
//assertThat( config.getShiroConfigFile(), is( "shiro.ini") );
}
- @Test
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
public void testDefaultTopologyName() {
GatewayConfig config = new GatewayConfigImpl();
- assertThat( config.getDefaultTopologyName(), is( "sandbox" ) );
+ assertThat( config.getDefaultTopologyName(), is( nullValue() ) );
+
+ ((GatewayConfigImpl)config).set("default.app.topology.name", "test-topo-name" );
+ assertThat( config.getDefaultTopologyName(), is( "test-topo-name" ) );
}
- @Test
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
public void testDefaultAppRedirectPath() {
GatewayConfig config = new GatewayConfigImpl();
- assertThat( config.getDefaultAppRedirectPath(), is( "/gateway/sandbox" ) );
+ assertThat( config.getDefaultAppRedirectPath(), nullValue() );
+
+ ((GatewayConfigImpl)config).set("default.app.topology.name", "test-topo-name" );
+ assertThat( config.getDefaultAppRedirectPath(), is("/gateway/test-topo-name") );
}
@Test
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.groovy
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.groovy b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.groovy
deleted file mode 100644
index c292744..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.groovy
+++ /dev/null
@@ -1,61 +0,0 @@
-package org.apache.hadoop.gateway.config.impl
-
-import org.junit.Test
-
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-public class GatewayConfigImplTest {
-
- @Test
- public void testHttpServerSettings() {
- GatewayConfigImpl config = new GatewayConfigImpl();
-
- // Check the defaults.
- assertThat( config.getHttpServerRequestBuffer(), is( 16*1024 ) );
- assertThat( config.getHttpServerRequestHeaderBuffer(), is( 8*1024 ) );
- assertThat( config.getHttpServerResponseBuffer(), is( 32*1024 ) );
- assertThat( config.getHttpServerResponseHeaderBuffer(), is( 8*1024 ) );
-
- assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, is( "gateway.httpserver.requestBuffer" ) );
- assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, is( "gateway.httpserver.requestHeaderBuffer" ) );
- assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, is( "gateway.httpserver.responseBuffer" ) );
- assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, is( "gateway.httpserver.responseHeaderBuffer" ) );
-
- config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 32*1024 );
- assertThat( config.getHttpServerRequestBuffer(), is( 32*1024 ) );
-
- config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 4*1024 );
- assertThat( config.getHttpServerRequestHeaderBuffer(), is( 4*1024 ) );
-
- config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 16*1024 );
- assertThat( config.getHttpServerResponseBuffer(), is( 16*1024 ) );
-
- config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 6*1024 );
- assertThat( config.getHttpServerResponseHeaderBuffer(), is( 6*1024 ) );
-
- // Restore the defaults.
- config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 16*1024 );
- config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 8*1024 );
- config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 32*1024 );
- config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8*1024 );
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
new file mode 100644
index 0000000..8b94b56
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImplTest.java
@@ -0,0 +1,96 @@
+package org.apache.hadoop.gateway.config.impl;
+
+import org.apache.hadoop.test.TestUtils;
+import org.junit.Test;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+public class GatewayConfigImplTest {
+
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
+ public void testHttpServerSettings() {
+ GatewayConfigImpl config = new GatewayConfigImpl();
+
+ // Check the defaults.
+ assertThat( config.getHttpServerRequestBuffer(), is( 16*1024 ) );
+ assertThat( config.getHttpServerRequestHeaderBuffer(), is( 8*1024 ) );
+ assertThat( config.getHttpServerResponseBuffer(), is( 32*1024 ) );
+ assertThat( config.getHttpServerResponseHeaderBuffer(), is( 8*1024 ) );
+
+ assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, is( "gateway.httpserver.requestBuffer" ) );
+ assertThat( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, is( "gateway.httpserver.requestHeaderBuffer" ) );
+ assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, is( "gateway.httpserver.responseBuffer" ) );
+ assertThat( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, is( "gateway.httpserver.responseHeaderBuffer" ) );
+
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 32*1024 );
+ assertThat( config.getHttpServerRequestBuffer(), is( 32*1024 ) );
+
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 4*1024 );
+ assertThat( config.getHttpServerRequestHeaderBuffer(), is( 4*1024 ) );
+
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 16*1024 );
+ assertThat( config.getHttpServerResponseBuffer(), is( 16*1024 ) );
+
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 6*1024 );
+ assertThat( config.getHttpServerResponseHeaderBuffer(), is( 6*1024 ) );
+
+ // Restore the defaults.
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_BUFFER, 16*1024 );
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_REQUEST_HEADER_BUFFER, 8*1024 );
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_BUFFER, 32*1024 );
+ config.setInt( GatewayConfigImpl.HTTP_SERVER_RESPONSE_HEADER_BUFFER, 8*1024 );
+ }
+
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
+ public void testGetGatewayDeploymentsBackupVersionLimit() {
+ GatewayConfigImpl config = new GatewayConfigImpl();
+ assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(5) );
+
+ config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, 3 );
+ assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(3) );
+
+ config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, -3 );
+ assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(-1) );
+
+ config.setInt( config.DEPLOYMENTS_BACKUP_VERSION_LIMIT, 0 );
+ assertThat( config.getGatewayDeploymentsBackupVersionLimit(), is(0) );
+ }
+
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
+ public void testGetGatewayDeploymentsBackupAgeLimit() {
+ GatewayConfigImpl config = new GatewayConfigImpl();
+ assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(-1L) );
+
+ config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "1" );
+ assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(86400000L) );
+
+ config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "2" );
+ assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(86400000L*2L) );
+
+ config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "0" );
+ assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(0L) );
+
+ config.set( config.DEPLOYMENTS_BACKUP_AGE_LIMIT, "X" );
+ assertThat( config.getGatewayDeploymentsBackupAgeLimit(), is(-1L) );
+ }
+
+
+}
[3/5] knox git commit: [KNOX-670] - Knox should be able to sost
simple web apps
Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryTest.java
index 8669b2e..129f0fb 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryTest.java
@@ -17,55 +17,308 @@
*/
package org.apache.hadoop.gateway.deploy;
+import java.io.IOException;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
+import org.apache.hadoop.gateway.topology.Application;
+import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
-import org.jboss.shrinkwrap.api.spec.WebArchive;
+import org.apache.hadoop.test.TestUtils;
+import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.junit.Test;
import org.w3c.dom.Document;
-import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.IOException;
-import java.io.InputStream;
-
+import static junit.framework.TestCase.fail;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.xml.HasXPath.hasXPath;
public class DeploymentFactoryTest {
- @Test
- public void testEmptyTopology() throws IOException, SAXException, ParserConfigurationException {
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testEmptyTopology() throws IOException, SAXException, ParserConfigurationException, TransformerException {
GatewayConfig config = new GatewayConfigImpl();
Topology topology = new Topology();
- topology.setName( "test-cluster" );
-
- WebArchive war = DeploymentFactory.createDeployment( config, topology );
- //File dir = new File( System.getProperty( "user.dir" ) );
- //File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
-
- Document wad = parse( war.get( "WEB-INF/web.xml" ).getAsset().openStream() );
- assertThat( wad, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster" ) ) );
- assertThat( wad, hasXPath( "/web-app/servlet/servlet-class", equalTo( "org.apache.hadoop.gateway.GatewayServlet" ) ) );
- assertThat( wad, hasXPath( "/web-app/servlet/init-param/param-name", equalTo( "gatewayDescriptorLocation" ) ) );
- assertThat( wad, hasXPath( "/web-app/servlet/init-param/param-value", equalTo( "gateway.xml" ) ) );
- assertThat( wad, hasXPath( "/web-app/servlet-mapping/servlet-name", equalTo( "test-cluster" ) ) );
- assertThat( wad, hasXPath( "/web-app/servlet-mapping/url-pattern", equalTo( "/*" ) ) );
-
- Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
- assertThat( gateway, hasXPath( "/gateway" ) );
+ topology.setName( "test-topology" );
+
+ EnterpriseArchive archive = DeploymentFactory.createDeployment( config, topology );
+
+ Document xml = TestUtils.parseXml( archive.get( "/META-INF/topology.xml" ).getAsset().openStream() );
+ //TestUtils.dumpXml( xml );
+ assertThat( xml, hasXPath( "/topology/gateway" ) );
+ assertThat( xml, hasXPath( "/topology/name", equalTo( "test-topology" ) ) );
}
- private Document parse( InputStream stream ) throws IOException, SAXException, ParserConfigurationException {
- DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
- DocumentBuilder builder = factory.newDocumentBuilder();
- InputSource source = new InputSource( stream );
- return builder.parse( source );
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
+ public void test_validateNoAppsWithRootUrlsInServicesTopology() {
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( null );
+
+ Topology topology = new Topology();
+ topology.setName( "test-topology" );
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( topology );
+
+ Service service;
+ Application application;
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ service = new Service();
+ service.setName( "test-service" );
+ service.setRole( "test-service" );
+ topology.addService( service );
+ application = new Application();
+ application.setName( "test-application" );
+ topology.addApplication( application );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ service = new Service();
+ service.setName( "test-service" );
+ service.setRole( "test-service" );
+ topology.addService( service );
+ application = new Application();
+ application.setName( "test-application" );
+ application.addUrl( "" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ service = new Service();
+ service.setName( "test-service" );
+ service.setRole( "test-service" );
+ topology.addService( service );
+ application = new Application();
+ application.setName( "test-application" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ service = new Service();
+ service.setName( "test-service" );
+ service.setRole( "test-service" );
+ topology.addService( service );
+ application = new Application();
+ application.setName( "test-application" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ service = new Service();
+ service.setName( "test-service" );
+ service.setRole( "test-service" );
+ topology.addService( service );
+ application = new Application();
+ application.setName( "test-application" );
+ application.addUrl( "/test-application" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithRootUrlsInServicesTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ }
+
+ @Test( timeout = TestUtils.SHORT_TIMEOUT )
+ public void test_validateNoAppsWithDuplicateUrlsInTopology() {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( null );
+
+ Topology topology = new Topology();
+ topology.setName( "test-topology" );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ Application application;
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/test-application-2" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ application.addUrl( "/test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/test-application-2" );
+ topology.addApplication( application );
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ application.addUrl( "/test-application-dup" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/test-application-dup" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ application.addUrl( "" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/test-application-1" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-1" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
+ topology = new Topology();
+ topology.setName( "test-topology" );
+ application = new Application();
+ application.setName( "test-application-1" );
+ application.addUrl( "/test-application-1" );
+ application.addUrl( "/test-application-3" );
+ topology.addApplication( application );
+ application = new Application();
+ application.setName( "test-application-2" );
+ application.addUrl( "/test-application-2" );
+ application.addUrl( "/test-application-3" );
+ topology.addApplication( application );
+ try {
+ DeploymentFactory.validateNoAppsWithDuplicateUrlsInTopology( topology );
+ fail( "Expected DeploymentException" );
+ } catch ( DeploymentException e ) {
+ // Expected.
+ }
+
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/jetty/JettyPathMapTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/jetty/JettyPathMapTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/jetty/JettyPathMapTest.java
index 749d2d6..6942234 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/jetty/JettyPathMapTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/jetty/JettyPathMapTest.java
@@ -17,33 +17,51 @@
*/
package org.apache.hadoop.gateway.jetty;
-import org.apache.hadoop.test.category.UnitTests;
-import org.apache.hadoop.test.category.FastTests;
import org.eclipse.jetty.http.PathMap;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
*
*/
-@Category( { UnitTests.class, FastTests.class } )
+//@Category( { UnitTests.class, FastTests.class } )
public class JettyPathMapTest {
- @Ignore( "This doesn't work like I expected." )
- @Test
+ //@Ignore( "This doesn't work like I expected." )
+ //@Test
public void testPathMatching() {
- PathMap map = new PathMap();
- map.put( "/webhdfs", "/webhdfs" );
- map.put( "/webhdfs/dfshealth.jsp", "/webhdfs/dfshealth.jsp" );
- map.put( "/webhdfs/*", "/webhdfs/*" );
-
- assertThat( (String)map.match( "/webhdfs" ), equalTo( "/webhdfs" ) );
- assertThat( (String)map.match( "/webhdfs/dfshealth.jsp" ), equalTo( "/webhdfs/dfshealth.jsp" ) );
- assertThat( (String)map.match( "/webhdfs/v1" ), equalTo( "/webhdfs/*" ) );
+ PathMap map;
+
+ map = new PathMap();
+ map.put( "/path", "/path" );
+ assertThat( (String)map.match("/path"), is("/path") );
+
+ map = new PathMap();
+ map.put( "/path", "/path" );
+ map.put( "/path/", "/path/" );
+ assertThat( (String)map.match("/path"), is("/path") );
+ assertThat( (String)map.match("/path/"), is("/path/") );
+
+ map = new PathMap();
+ map.put( "/path/*", "/path/*" );
+ map.put( "/path", "/path" );
+ map.put( "/path/", "/path/" );
+ assertThat( (String)map.match("/path"), is("/path") );
+ assertThat( (String)map.match("/path/"), is("/path/") );
+ assertThat( (String)map.match("/path/sub"), is("/path/*") );
+
+ map = new PathMap();
+ map.put( "/path", "/path" );
+ map.put( "/path/", "/path/" );
+ map.put( "/path/*", "/path/*" );
+ assertThat( (String)map.match( "/path/sub" ), is("/path/*") );
+
+ // Here the addition of the * path "overwrites" the exact matches.
+ // Above this worked if the /path and /path/ were added after /path/*.
+ assertThat( (String)map.match("/path"), is("/path") );
+ assertThat( (String)map.match("/path/"), is("/path/") );
+
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest.java
new file mode 100644
index 0000000..3918b53
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.validation;
+
+import java.net.URL;
+
+import org.apache.hadoop.test.TestUtils;
+import org.junit.Test;
+
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+public class TopologyValidatorTest {
+
+ @Test
+ public void testValidateApplications() throws Exception {
+ URL url;
+ TopologyValidator validator;
+
+ url = TestUtils.getResourceUrl( TopologyValidatorTest.class, "topology-valid-complete.xml" );
+ validator = new TopologyValidator( url );
+ assertThat( validator.getErrorString(), validator.validateTopology(), is( true ) );
+
+ url = TestUtils.getResourceUrl( TopologyValidatorTest.class, "topology-valid.xml" );
+ validator = new TopologyValidator( url );
+ assertThat( validator.getErrorString(), validator.validateTopology(), is( true ) );
+
+ url = TestUtils.getResourceUrl( TopologyValidatorTest.class, "topology-valid-with-name.xml" );
+ validator = new TopologyValidator( url );
+ assertThat( validator.getErrorString(), validator.validateTopology(), is( true ) );
+
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
index 5981479..bb2c85e 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest.java
@@ -19,11 +19,13 @@ package org.apache.hadoop.gateway.topology.xml;
import org.apache.commons.digester3.Digester;
import org.apache.commons.digester3.binder.DigesterLoader;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.topology.Version;
import org.apache.hadoop.gateway.topology.builder.TopologyBuilder;
+import org.apache.hadoop.test.TestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@@ -236,4 +238,25 @@ public class TopologyRulesModuleTest {
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-1" ), is( "test-service-param-value-1" ) ) );
assertThat( service.getParams(), hasEntry( is( "test-service-param-name-2" ), is( "test-service-param-value-2" ) ) );
}
+
+ @Test
+ public void testParseTopologyWithApplication() throws IOException, SAXException {
+ Digester digester = loader.newDigester();
+ String name = "topology-with-application.xml";
+ URL url = TestUtils.getResourceUrl( TopologyRulesModuleTest.class, name );
+ assertThat( "Failed to find URL for resource " + name, url, notNullValue() );
+ File file = new File( url.getFile() );
+ TopologyBuilder topologyBuilder = digester.parse( url );
+ Topology topology = topologyBuilder.build();
+ assertThat( "Failed to parse resource " + name, topology, notNullValue() );
+ topology.setTimestamp( file.lastModified() );
+
+ Application app = topology.getApplications().iterator().next();
+ assertThat( "Failed to find application", app, notNullValue() );
+ assertThat( app.getName(), is("test-app-name") );
+ assertThat( app.getUrl(), is("test-app-path") );
+ assertThat( app.getUrls().get( 0 ), is("test-app-path") );
+ assertThat( app.getParams().get( "test-param-name" ), is( "test-param-value" ) );
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-complete.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-complete.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-complete.xml
new file mode 100644
index 0000000..ec38a81
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-complete.xml
@@ -0,0 +1,40 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <name>topology-name</name>
+ <gateway>
+ <provider>
+ <role>test-provider-role</role>
+ <name>test-provider-name</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <service>
+ <role>test-service-role</role>
+ </service>
+ <service>
+ <role>test-service-role</role>
+ <url>test-service-url</url>
+ </service>
+ <application>
+ <name>test-app-name</name>
+ </application>
+ <application>
+ <name>test-app-name</name>
+ <url>test-app-url</url>
+ </application>
+</topology>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-with-name.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-with-name.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-with-name.xml
new file mode 100644
index 0000000..7d5a697
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid-with-name.xml
@@ -0,0 +1,19 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <name>topology-name</name>
+</topology>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid.xml
new file mode 100644
index 0000000..057bcbb
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/validation/TopologyValidatorTest/topology-valid.xml
@@ -0,0 +1,25 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <application>
+ <name>test-application-name</name>
+ </application>
+ <application>
+ <name>test-application-name</name>
+ <url>test-application-path</url>
+ </application>
+</topology>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest/topology-with-application.xml
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest/topology-with-application.xml b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest/topology-with-application.xml
new file mode 100644
index 0000000..e6143d5
--- /dev/null
+++ b/gateway-server/src/test/resources/org/apache/hadoop/gateway/topology/xml/TopologyRulesModuleTest/topology-with-application.xml
@@ -0,0 +1,23 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <application>
+ <name>test-app-name</name>
+ <url>test-app-path</url>
+ <param name="test-param-name" value="test-param-value"/>
+ </application>
+</topology>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-service-admin/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-service-admin/pom.xml b/gateway-service-admin/pom.xml
index d63ff11..4a043fe 100644
--- a/gateway-service-admin/pom.xml
+++ b/gateway-service-admin/pom.xml
@@ -64,5 +64,10 @@
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.xmlmatchers</groupId>
+ <artifactId>xml-matchers</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-service-admin/src/test/java/org/apache/hadoop/gateway/service/admin/TopologyMarshallerTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-admin/src/test/java/org/apache/hadoop/gateway/service/admin/TopologyMarshallerTest.java b/gateway-service-admin/src/test/java/org/apache/hadoop/gateway/service/admin/TopologyMarshallerTest.java
new file mode 100644
index 0000000..ba62cf2
--- /dev/null
+++ b/gateway-service-admin/src/test/java/org/apache/hadoop/gateway/service/admin/TopologyMarshallerTest.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.service.admin;
+
+import java.io.StringWriter;
+import java.util.HashMap;
+import java.util.Map;
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Marshaller;
+
+import org.apache.hadoop.gateway.topology.Application;
+import org.apache.hadoop.gateway.topology.Topology;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.Is.is;
+import static org.xmlmatchers.transform.XmlConverters.the;
+import static org.xmlmatchers.xpath.HasXPath.hasXPath;
+import static org.xmlmatchers.xpath.XpathReturnType.returningAString;
+
+public class TopologyMarshallerTest {
+
+ @Test
+ public void testTopologyMarshalling() throws Exception {
+ Topology topology = new Topology();
+ Application app = new Application();
+ app.setName( "test-app-name" );
+ topology.addApplication( app );
+
+ StringWriter writer = new StringWriter();
+ String xml;
+
+ Map<String,Object> properties = new HashMap<String,Object>(2);
+ properties.put( "eclipselink-oxm-xml", "org/apache/hadoop/gateway/topology/topology_binding-xml.xml" );
+ properties.put( "eclipselink.media-type", "application/xml" );
+ JAXBContext jaxbContext = JAXBContext.newInstance( Topology.class.getPackage().getName(), Topology.class.getClassLoader() , properties );
+ Marshaller marshaller = jaxbContext.createMarshaller();
+ marshaller.setProperty( Marshaller.JAXB_FORMATTED_OUTPUT, true );
+ marshaller.marshal( topology, writer );
+ writer.close();
+ xml = writer.toString();
+ assertThat( the( xml ), hasXPath( "/topology/application/name", returningAString(), is("test-app-name") ) );
+ }
+
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-service-test/src/main/java/org/apache/hadoop/gateway/service/test/ServiceTestResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-test/src/main/java/org/apache/hadoop/gateway/service/test/ServiceTestResource.java b/gateway-service-test/src/main/java/org/apache/hadoop/gateway/service/test/ServiceTestResource.java
index 0d26301..5ed4a21 100644
--- a/gateway-service-test/src/main/java/org/apache/hadoop/gateway/service/test/ServiceTestResource.java
+++ b/gateway-service-test/src/main/java/org/apache/hadoop/gateway/service/test/ServiceTestResource.java
@@ -17,6 +17,26 @@
*/
package org.apache.hadoop.gateway.service.test;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import javax.net.ssl.SSLContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlElementWrapper;
+import javax.xml.bind.annotation.XmlRootElement;
+
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.topology.TopologyService;
@@ -31,32 +51,8 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.glassfish.jersey.internal.util.Base64;
-import javax.net.ssl.SSLContext;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.GET;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.xml.bind.annotation.XmlElement;
-import javax.xml.bind.annotation.XmlElementWrapper;
-import javax.xml.bind.annotation.XmlRootElement;
-import javax.xml.bind.annotation.XmlAccessType;
-import javax.xml.bind.annotation.XmlAccessorType;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static javax.ws.rs.core.MediaType.APPLICATION_XML;
-import static javax.ws.rs.core.Response.Status.NOT_FOUND;
-import static javax.ws.rs.core.Response.ok;
-import static javax.ws.rs.core.Response.status;
@Path( "/service-test" )
public class ServiceTestResource {
@@ -297,7 +293,7 @@ public class ServiceTestResource {
this.serviceName = s.getRole();
}
- public ServiceTest(Service s, String requestURL) {
+ public ServiceTest( Service s, String requestURL) {
this.serviceName = s.getRole();
this.requestURL = requestURL;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/pom.xml b/gateway-spi/pom.xml
index 9454536..6c55072 100644
--- a/gateway-spi/pom.xml
+++ b/gateway-spi/pom.xml
@@ -59,10 +59,16 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>javax.servlet-api</artifactId>
+ </dependency>
<dependency>
<groupId>org.jboss.shrinkwrap</groupId>
<artifactId>shrinkwrap-api</artifactId>
@@ -138,6 +144,12 @@
<artifactId>hamcrest-library</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>test</scope>
+ </dependency>
+
</dependencies>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
index 7e30b72..475649d 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/config/GatewayConfig.java
@@ -59,6 +59,12 @@ public interface GatewayConfig {
*/
String getGatewayServicesDir();
+ /**
+ * The location of the gateway applications's root directory
+ * @return The location of the gateway applications top level directory.
+ */
+ String getGatewayApplicationsDir();
+
String getHadoopConfDir();
String getGatewayHost();
@@ -119,4 +125,8 @@ public interface GatewayConfig {
int getHttpServerResponseHeaderBuffer();
+ int getGatewayDeploymentsBackupVersionLimit();
+
+ long getGatewayDeploymentsBackupAgeLimit();
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
index 8206b88..4508ad8 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/deploy/ServiceDeploymentContributorBase.java
@@ -74,11 +74,14 @@ public abstract class ServiceDeploymentContributorBase extends DeploymentContrib
}
}
- protected void addIdentityAssertionFilter(DeploymentContext context, Service service, ResourceDescriptor resource) {
- context.contributeFilter( service, resource, "identity-assertion", null, null );
+ protected void addIdentityAssertionFilter( DeploymentContext context, Service service, ResourceDescriptor resource) {
+ if( topologyContainsProviderType( context, "authentication" ) ||
+ topologyContainsProviderType( context, "federation" ) ) {
+ context.contributeFilter( service, resource, "identity-assertion", null, null );
+ }
}
- protected void addAuthorizationFilter(DeploymentContext context, Service service, ResourceDescriptor resource) {
+ protected void addAuthorizationFilter( DeploymentContext context, Service service, ResourceDescriptor resource) {
if (topologyContainsProviderType(context, "authorization")) {
context.contributeFilter( service, resource, "authorization", null, null );
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Application.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Application.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Application.java
new file mode 100644
index 0000000..1ae8962
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Application.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology;
+
+public class Application extends Service {
+
+ public String getRole() {
+ return getName();
+ }
+
+ public void setRole( String role ) {
+ setName( role );
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Routable.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Routable.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Routable.java
new file mode 100644
index 0000000..f1b1c49
--- /dev/null
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Routable.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.topology;
+
+public class Routable {
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
index 955f72f..c698c4c 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Service.java
@@ -81,7 +81,7 @@ public class Service {
return params;
}
- private Collection<Param> getParamsList(){
+ public Collection<Param> getParamsList(){
ArrayList<Param> paramList = new ArrayList<Param>();
@@ -95,6 +95,14 @@ public class Service {
return paramList;
}
+ public void setParamsList( Collection<Param> params ) {
+ if( params != null ) {
+ for( Param param : params ) {
+ addParam( param );
+ }
+ }
+ }
+
public void setParams(Map<String, String> params) {
this.params = params;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
index 7be8301..87f73df 100644
--- a/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
+++ b/gateway-spi/src/main/java/org/apache/hadoop/gateway/topology/Topology.java
@@ -35,8 +35,9 @@ public class Topology {
public List<Provider> providerList = new ArrayList<Provider>();
private Map<String,Map<String,Provider>> providerMap = new HashMap<String,Map<String,Provider>>();
public List<Service> services = new ArrayList<Service>();
-
private MultiKeyMap serviceMap;
+ private List<Application> applications = new ArrayList<Application>();
+ private Map<String,Application> applicationMap = new HashMap<String,Application>();
public Topology() {
serviceMap = MultiKeyMap.decorate(new HashedMap());
@@ -70,7 +71,7 @@ public class Topology {
return services;
}
- public Service getService(String role, String name, Version version) {
+ public Service getService( String role, String name, Version version) {
return (Service)serviceMap.get(role, name, version);
}
@@ -79,6 +80,36 @@ public class Topology {
serviceMap.put(service.getRole(), service.getName(), service.getVersion(), service);
}
+ public Collection<Application> getApplications() {
+ return applications;
+ }
+
+ private static String fixApplicationUrl( String url ) {
+ if( url == null ) {
+ url = "/";
+ }
+ if( !url.startsWith( "/" ) ) {
+ url = "/" + url;
+ }
+ return url;
+ }
+
+ public Application getApplication(String url) {
+ return applicationMap.get( fixApplicationUrl( url ) );
+ }
+
+ public void addApplication( Application application ) {
+ applications.add( application );
+ List<String> urls = application.getUrls();
+ if( urls == null || urls.isEmpty() ) {
+ applicationMap.put( fixApplicationUrl( application.getName() ), application );
+ } else {
+ for( String url : application.getUrls() ) {
+ applicationMap.put( fixApplicationUrl( url ), application );
+ }
+ }
+ }
+
public Collection<Provider> getProviders() {
return providerList;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/jaxb.index
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/jaxb.index b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/jaxb.index
index 78e716f..3baf0c3 100644
--- a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/jaxb.index
+++ b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/jaxb.index
@@ -18,4 +18,5 @@
Topology
Param
Service
+Application
Provider
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-json.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-json.xml b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-json.xml
index fe8613d..7fb301b 100644
--- a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-json.xml
+++ b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-json.xml
@@ -24,12 +24,13 @@ or more contributor license agreements. See the NOTICE file
element-form-default="QUALIFIED"/>
<java-types>
<java-type name="Topology" xml-accessor-type="NONE">
- <xml-type prop-order="name providers services timestamp"/>
+ <xml-type prop-order="name providers services applications timestamp"/>
<xml-root-element/>
- <java-attributes prop-order="providers services">
+ <java-attributes prop-order="providers services applications">
+ <xml-element java-attribute="name" name="name"/>
<xml-element java-attribute="providers" name="providers"/>
<xml-element java-attribute="services" name="services"/>
- <xml-element java-attribute="name" name="name"/>
+ <xml-element java-attribute="applications" name="applications"/>
<xml-element java-attribute="timestamp" name="timestamp"/>
</java-attributes>
</java-type>
@@ -46,6 +47,7 @@ or more contributor license agreements. See the NOTICE file
</java-type>
<java-type name="Service" xml-accessor-type="NONE">
<java-attributes>
+ <xml-element java-attribute="name" name="name"/>
<xml-element java-attribute="role" name="role"/>
<xml-element java-attribute="urls" name="urls"/>
<xml-variable-node java-attribute="paramsList" java-variable-attribute="name">
@@ -53,6 +55,7 @@ or more contributor license agreements. See the NOTICE file
</xml-variable-node>
</java-attributes>
</java-type>
+ <java-type name="Application" xml-accessor-type="NONE"/>
<java-type name="Param" xml-accessor-type="NONE">
<java-attributes>
<xml-value java-attribute="value"/>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
----------------------------------------------------------------------
diff --git a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
index 6f397c5..50d8d58 100644
--- a/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
+++ b/gateway-spi/src/main/resources/org/apache/hadoop/gateway/topology/topology_binding-xml.xml
@@ -24,14 +24,16 @@ or more contributor license agreements. See the NOTICE file
element-form-default="QUALIFIED"/>
<java-types>
<java-type name="Topology" xml-accessor-type="NONE">
- <xml-type prop-order="providers services"/>
+ <xml-type prop-order="name providers services applications"/>
<xml-root-element/>
- <java-attributes prop-order="providers services">
+ <java-attributes>
+ <xml-element java-attribute="name" name="name"/>
<xml-elements java-attribute="providers">
<xml-element name="provider"/>
<xml-element-wrapper name="gateway"/>
</xml-elements>
<xml-element java-attribute="services" name="service"/>
+ <xml-element java-attribute="applications" name="application"/>
</java-attributes>
</java-type>
<java-type name="Provider" xml-accessor-type="NONE">
@@ -44,11 +46,13 @@ or more contributor license agreements. See the NOTICE file
</java-type>
<java-type name="Service" xml-accessor-type="NONE">
<java-attributes>
+ <xml-element java-attribute="name" name="name"/>
<xml-element java-attribute="role" name="role"/>
<xml-element java-attribute="urls" name="url"/>
<xml-element java-attribute="paramsList" name="param"/>
</java-attributes>
</java-type>
+ <java-type name="Application" xml-accessor-type="NONE"/>
<java-type name="Param" xml-accessor-type="NONE">
<java-attributes>
<xml-element java-attribute="name"/>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 779eb2d..950952c 100644
--- a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -248,6 +248,11 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
}
@Override
+ public String getGatewayApplicationsDir() {
+ return gatewayHomeDir + "/conf/applications";
+ }
+
+ @Override
public boolean isXForwardedEnabled() {
return xForwardedEnabled;
}
@@ -294,4 +299,14 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
return 8*1024;
}
+ @Override
+ public int getGatewayDeploymentsBackupVersionLimit() {
+ return Integer.MAX_VALUE;
+ }
+
+ @Override
+ public long getGatewayDeploymentsBackupAgeLimit() {
+ return Long.MAX_VALUE;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test-utils/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-utils/pom.xml b/gateway-test-utils/pom.xml
index eaf6f8f..4dc2790 100644
--- a/gateway-test-utils/pom.xml
+++ b/gateway-test-utils/pom.xml
@@ -91,11 +91,13 @@
<artifactId>jetty-servlet</artifactId>
<scope>provided</scope>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
<scope>provided</scope>
</dependency>
+ -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
@@ -121,6 +123,12 @@
<scope>provided</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>provided</scope>
+ </dependency>
+
</dependencies>
</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
index 2a78364..b6dfe54 100644
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
+++ b/gateway-test-utils/src/main/java/org/apache/hadoop/test/TestUtils.java
@@ -17,22 +17,6 @@
*/
package org.apache.hadoop.test;
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.junit.Test;
-import org.w3c.dom.Document;
-import org.xml.sax.InputSource;
-import org.xml.sax.SAXException;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -42,12 +26,38 @@ import java.io.Reader;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
+import java.net.ServerSocket;
import java.net.Socket;
import java.net.URL;
+import java.util.Properties;
import java.util.UUID;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.VelocityEngine;
+import org.apache.velocity.runtime.RuntimeConstants;
+import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
+import org.w3c.dom.Document;
+import org.xml.sax.InputSource;
+import org.xml.sax.SAXException;
public class TestUtils {
+ public static final long SHORT_TIMEOUT = 1000L;
+ public static final long MEDIUM_TIMEOUT = 20 * 1000L;
+ public static final long LONG_TIMEOUT = 60 * 1000L;
+
public static String getResourceName( Class clazz, String name ) {
name = clazz.getName().replaceAll( "\\.", "/" ) + "/" + name;
return name;
@@ -120,12 +130,16 @@ public class TestUtils {
public static void LOG_ENTER() {
StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
+ System.out.flush();
System.out.println( String.format( "Running %s#%s", caller.getClassName(), caller.getMethodName() ) );
+ System.out.flush();
}
public static void LOG_EXIT() {
StackTraceElement caller = Thread.currentThread().getStackTrace()[2];
+ System.out.flush();
System.out.println( String.format( "Exiting %s#%s", caller.getClassName(), caller.getMethodName() ) );
+ System.out.flush();
}
public static void awaitPortOpen( InetSocketAddress address, int timeout, int delay ) throws InterruptedException {
@@ -166,4 +180,46 @@ public class TestUtils {
throw new IllegalStateException( "Timed out " + timeout + " waiting for URL " + url );
}
+ public static String merge( String resource, Properties properties ) {
+ ClasspathResourceLoader loader = new ClasspathResourceLoader();
+ loader.getResourceStream( resource );
+
+ VelocityEngine engine = new VelocityEngine();
+ Properties config = new Properties();
+ config.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
+ config.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
+ config.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
+ engine.init( config );
+
+ VelocityContext context = new VelocityContext( properties );
+ Template template = engine.getTemplate( resource );
+ StringWriter writer = new StringWriter();
+ template.merge( context, writer );
+ return writer.toString();
+ }
+
+ public static String merge( Class base, String resource, Properties properties ) {
+ String baseResource = base.getName().replaceAll( "\\.", "/" );
+ String fullResource = baseResource + "/" + resource;
+ return merge( fullResource, properties );
+ }
+
+ public static int findFreePort() throws IOException {
+ ServerSocket socket = new ServerSocket(0);
+ int port = socket.getLocalPort();
+ socket.close();
+ return port;
+ }
+
+ public static void waitUntilNextSecond() {
+ long before = System.currentTimeMillis();
+ long wait;
+ while( ( wait = ( 1000 - ( System.currentTimeMillis() - before ) ) ) > 0 ) {
+ try {
+ Thread.sleep( wait );
+ } catch( InterruptedException e ) {
+ // Ignore.
+ }
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
index 6a7db25..e099141 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
@@ -17,6 +17,20 @@
*/
package org.apache.hadoop.gateway;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+import javax.ws.rs.core.MediaType;
+
import com.jayway.restassured.http.ContentType;
import com.mycila.xmltool.XMLDoc;
import com.mycila.xmltool.XMLTag;
@@ -37,34 +51,19 @@ import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.BeforeClass;
-import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import javax.ws.rs.core.MediaType;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.ServerSocket;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-
import static com.jayway.restassured.RestAssured.given;
import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
-import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
@@ -134,7 +133,6 @@ public class GatewayAdminTopologyFuncTest {
createNormalTopology().toStream( stream2 );
stream.close();
-
DefaultGatewayServices srvcs = new DefaultGatewayServices();
Map<String,String> options = new HashMap<String,String>();
options.put( "persist-master", "false" );
@@ -238,7 +236,6 @@ public class GatewayAdminTopologyFuncTest {
.addTag( "role" ).addText( "identity-assertion" )
.addTag( "enabled" ).addText( "true" )
.addTag( "name" ).addText( "Default" ).gotoParent()
- .addTag( "provider" )
.gotoRoot()
.addTag( "service" )
.addTag( "role" ).addText( "KNOX" )
@@ -506,7 +503,7 @@ public class GatewayAdminTopologyFuncTest {
}
@Test( timeout = LONG_TIMEOUT )
- public void testDeployTopology() throws ClassNotFoundException {
+ public void testDeployTopology() throws Exception {
LOG_ENTER();
Topology testTopology = createTestTopology();
@@ -519,42 +516,42 @@ public class GatewayAdminTopologyFuncTest {
GatewayServices srvs = GatewayServer.getGatewayServices();
TopologyService ts = srvs.getService(GatewayServices.TOPOLOGY_SERVICE);
-
- assertThat(testTopology, not(nullValue()));
- assertThat(testTopology.getName(), is("test-topology"));
-
- given()
- //.log().all()
- .auth().preemptive().basic(user, password)
- .expect()
- //.log().all()
- .statusCode(HttpStatus.SC_NOT_FOUND)
- .when()
- .get(url);
-
- ts.deployTopology(testTopology);
-
- given()
- //.log().all()
- .auth().preemptive().basic(user, password)
- .expect()
- //.log().all()
- .statusCode(HttpStatus.SC_OK)
- .contentType("text/plain")
- .body(is("test-service-response"))
- .when()
- .get(url).getBody();
-
- ts.deleteTopology(testTopology);
-
- given()
- //.log().all()
- .auth().preemptive().basic(user, password)
- .expect()
- //.log().all()
- .statusCode(HttpStatus.SC_NOT_FOUND)
- .when()
- .get(url);
+ try {
+ ts.stopMonitor();
+
+ assertThat( testTopology, not( nullValue() ) );
+ assertThat( testTopology.getName(), is( "test-topology" ) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( "admin", "admin-password" ).header( "Accept", MediaType.APPLICATION_JSON ).expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK ).body( containsString( "ServerVersion" ) ).when().get( gatewayUrl + "/admin/api/v1/version" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
+
+ ts.deployTopology( testTopology );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK ).contentType( "text/plain" ).body( is( "test-service-response" ) ).when().get( url ).getBody();
+
+ ts.deleteTopology( testTopology );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( user, password ).expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
+ } finally {
+ ts.startMonitor();
+ }
LOG_EXIT();
}
[5/5] knox git commit: [KNOX-670] - Knox should be able to sost
simple web apps
Posted by km...@apache.org.
[KNOX-670] - Knox should be able to sost simple web apps
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/a70a3b56
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/a70a3b56
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/a70a3b56
Branch: refs/heads/master
Commit: a70a3b56cf54e1c1088d57bf7f4c1dbe06b291c4
Parents: 82539e4
Author: Kevin Minder <km...@apache.org>
Authored: Thu Feb 25 15:54:13 2016 -0500
Committer: Kevin Minder <km...@apache.org>
Committed: Thu Feb 25 15:54:13 2016 -0500
----------------------------------------------------------------------
CHANGES | 8 +
.../pom.xml | 5 +
.../pom.xml | 5 +
.../pom.xml | 5 +
gateway-provider-rewrite/pom.xml | 7 +
.../impl/xml/XmlUrlRewriteRulesExporter.java | 12 +-
gateway-provider-security-authc-anon/pom.xml | 6 -
gateway-provider-security-authz-acls/pom.xml | 2 +
gateway-provider-security-hadoopauth/pom.xml | 2 +
gateway-provider-security-jwt/pom.xml | 2 +
gateway-provider-security-picketlink/pom.xml | 2 +
gateway-provider-security-preauth/pom.xml | 2 +
gateway-provider-security-shiro/pom.xml | 2 +
gateway-provider-security-webappsec/pom.xml | 2 +
gateway-release/home/data/applications/README | 1 +
gateway-release/pom.xml | 4 +
gateway-server/pom.xml | 11 +
.../apache/hadoop/gateway/GatewayFilter.java | 46 +-
.../apache/hadoop/gateway/GatewayMessages.java | 37 +-
.../apache/hadoop/gateway/GatewayServer.java | 374 +++++++---
.../apache/hadoop/gateway/GatewayServlet.java | 130 +++-
.../gateway/config/impl/GatewayConfigImpl.java | 48 +-
.../gateway/deploy/DeploymentContextImpl.java | 8 +-
.../gateway/deploy/DeploymentFactory.java | 458 +++++++++----
.../impl/ApplicationDeploymentContributor.java | 214 ++++++
.../xml/XmlGatewayDescriptorExporter.java | 34 +-
.../gateway/filter/DefaultTopologyHandler.java | 104 +++
.../gateway/filter/GatewayHelloFilter.java | 45 ++
.../builder/BeanPropertyTopologyBuilder.java | 16 +
.../topology/validation/TopologyValidator.java | 27 +-
.../xml/KnoxFormatXmlTopologyRules.java | 11 +
.../org/apache/hadoop/gateway/util/KnoxCLI.java | 46 +-
.../gateway/util/ServiceDefinitionsLoader.java | 2 +-
.../src/main/resources/conf/topology-v1.xsd | 43 +-
.../hadoop/gateway/GatewayGlobalConfigTest.java | 16 +-
.../config/impl/GatewayConfigImplTest.groovy | 61 --
.../config/impl/GatewayConfigImplTest.java | 96 +++
.../gateway/deploy/DeploymentFactoryTest.java | 315 ++++++++-
.../hadoop/gateway/jetty/JettyPathMapTest.java | 52 +-
.../validation/TopologyValidatorTest.java | 48 ++
.../topology/xml/TopologyRulesModuleTest.java | 23 +
.../topology-valid-complete.xml | 40 ++
.../topology-valid-with-name.xml | 19 +
.../TopologyValidatorTest/topology-valid.xml | 25 +
.../topology-with-application.xml | 23 +
gateway-service-admin/pom.xml | 5 +
.../service/admin/TopologyMarshallerTest.java | 60 ++
.../service/test/ServiceTestResource.java | 46 +-
gateway-spi/pom.xml | 12 +
.../hadoop/gateway/config/GatewayConfig.java | 10 +
.../ServiceDeploymentContributorBase.java | 9 +-
.../hadoop/gateway/topology/Application.java | 30 +
.../hadoop/gateway/topology/Routable.java | 22 +
.../apache/hadoop/gateway/topology/Service.java | 10 +-
.../hadoop/gateway/topology/Topology.java | 35 +-
.../apache/hadoop/gateway/topology/jaxb.index | 1 +
.../gateway/topology/topology_binding-json.xml | 9 +-
.../gateway/topology/topology_binding-xml.xml | 8 +-
.../hadoop/gateway/GatewayTestConfig.java | 15 +
gateway-test-utils/pom.xml | 8 +
.../java/org/apache/hadoop/test/TestUtils.java | 88 ++-
.../gateway/GatewayAdminTopologyFuncTest.java | 107 ++-
.../hadoop/gateway/GatewayAppFuncTest.java | 685 +++++++++++++++++++
.../hadoop/gateway/GatewayDeployFuncTest.java | 28 +-
.../hadoop/gateway/GatewayTestConfig.java | 66 +-
.../org/apache/hadoop/gateway/TestServlet.java | 32 +
.../deploy/DeploymentFactoryFuncTest.java | 257 +++++--
.../test-apps/dynamic-app/WEB-INF/web.xml | 15 +
.../GatewayAppFuncTest/test-apps/readme.txt | 18 +
.../test-apps/static-hello-app/index.html | 24 +
.../test-apps/static-json-app/one.json | 3 +
.../test-apps/static-json-app/rewrite.xml | 17 +
.../test-apps/static-json-app/service.xml | 21 +
.../test-apps/static-xml-app/test.xml | 17 +
.../test-default-app-name-topology.xml | 53 ++
.../test-dynamic-app-topology.xml | 54 ++
.../test-multi-apps-topology.xml | 65 ++
.../test-naked-app-topology.xml | 33 +
.../test-static-hello-topology.xml | 54 ++
.../test-svcs-and-apps-topology.xml | 62 ++
.../GatewayAppFuncTest/test-svcs/readme.txt | 18 +
.../test-svcs/webhdfs/2.4.0/rewrite.xml | 70 ++
.../test-svcs/webhdfs/2.4.0/service.xml | 43 ++
.../gateway/GatewayAppFuncTest/users.ldif | 42 ++
.../test-apps/minimal-test-app/rewrite.xml | 17 +
.../test-apps/minimal-test-app/service.xml | 21 +
gateway-util-common/pom.xml | 16 +-
.../org/apache/hadoop/gateway/util/Urls.java | 56 ++
.../apache/hadoop/gateway/util/XmlUtils.java | 72 ++
.../apache/hadoop/gateway/util/UrlsTest.java | 28 +
gateway-util-urltemplate/pom.xml | 4 +-
pom.xml | 29 +-
92 files changed, 4164 insertions(+), 670 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/CHANGES
----------------------------------------------------------------------
diff --git a/CHANGES b/CHANGES
index e5c7b63..8f364c3 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,4 +1,12 @@
------------------------------------------------------------------------------
+Release Notes - Apache Knox - Version 0.9.0
+------------------------------------------------------------------------------
+** New Feature
+ * [KNOX-670] - Knox should be able to sost simple web apps
+** Improvement
+** Bug
+
+------------------------------------------------------------------------------
Release Notes - Apache Knox - Version 0.8.0
------------------------------------------------------------------------------
** New Feature
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-rewrite-func-hostmap-static/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-hostmap-static/pom.xml b/gateway-provider-rewrite-func-hostmap-static/pom.xml
index f9e3583..545b500 100644
--- a/gateway-provider-rewrite-func-hostmap-static/pom.xml
+++ b/gateway-provider-rewrite-func-hostmap-static/pom.xml
@@ -103,6 +103,11 @@
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-rewrite-func-service-registry/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-func-service-registry/pom.xml b/gateway-provider-rewrite-func-service-registry/pom.xml
index 7caddf2..52bbed5 100644
--- a/gateway-provider-rewrite-func-service-registry/pom.xml
+++ b/gateway-provider-rewrite-func-service-registry/pom.xml
@@ -70,6 +70,11 @@
<artifactId>gateway-test-utils</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-rewrite-step-encrypt-uri/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite-step-encrypt-uri/pom.xml b/gateway-provider-rewrite-step-encrypt-uri/pom.xml
index 3e14f5a..101bb15 100644
--- a/gateway-provider-rewrite-step-encrypt-uri/pom.xml
+++ b/gateway-provider-rewrite-step-encrypt-uri/pom.xml
@@ -76,6 +76,11 @@
<artifactId>gateway-test-utils</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>${gateway-group}</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-rewrite/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/pom.xml b/gateway-provider-rewrite/pom.xml
index a25d2f3..243969e 100644
--- a/gateway-provider-rewrite/pom.xml
+++ b/gateway-provider-rewrite/pom.xml
@@ -60,10 +60,12 @@
<artifactId>gateway-spi</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>org.apache.commons</groupId>
@@ -151,6 +153,11 @@
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.velocity</groupId>
+ <artifactId>velocity</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteRulesExporter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteRulesExporter.java b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteRulesExporter.java
index 707a75c..6fa92d5 100644
--- a/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteRulesExporter.java
+++ b/gateway-provider-rewrite/src/main/java/org/apache/hadoop/gateway/filter/rewrite/impl/xml/XmlUrlRewriteRulesExporter.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.gateway.filter.rewrite.api.*;
import org.apache.hadoop.gateway.filter.rewrite.i18n.UrlRewriteMessages;
import org.apache.hadoop.gateway.filter.rewrite.spi.UrlRewriteRulesExporter;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.util.XmlUtils;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
@@ -86,16 +87,7 @@ public class XmlUrlRewriteRulesExporter implements UrlRewriteRulesExporter, XmlR
}
}
- TransformerFactory transformerFactory = TransformerFactory.newInstance();
- transformerFactory.setAttribute( "indent-number", 2 );
- Transformer transformer = transformerFactory.newTransformer();
- //transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" );
- transformer.setOutputProperty( OutputKeys.STANDALONE, "yes" );
- transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
-
- StreamResult result = new StreamResult( writer );
- DOMSource source = new DOMSource(document);
- transformer.transform( source, result );
+ XmlUtils.writeXml( document, writer );
} catch( ParserConfigurationException e ) {
throw new IOException( e );
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-authc-anon/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-authc-anon/pom.xml b/gateway-provider-security-authc-anon/pom.xml
index cde18f1..c6203b3 100755
--- a/gateway-provider-security-authc-anon/pom.xml
+++ b/gateway-provider-security-authc-anon/pom.xml
@@ -46,11 +46,6 @@
</dependency>
<dependency>
- <groupId>org.eclipse.jetty.orbit</groupId>
- <artifactId>javax.servlet</artifactId>
- </dependency>
-
- <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
@@ -60,7 +55,6 @@
<artifactId>easymock</artifactId>
<scope>test</scope>
</dependency>
-
<dependency>
<groupId>org.apache.knox</groupId>
<artifactId>gateway-test-utils</artifactId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-authz-acls/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-authz-acls/pom.xml b/gateway-provider-security-authz-acls/pom.xml
index 210d925..1564144 100644
--- a/gateway-provider-security-authz-acls/pom.xml
+++ b/gateway-provider-security-authz-acls/pom.xml
@@ -35,10 +35,12 @@
<groupId>${gateway-group}</groupId>
<artifactId>gateway-util-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-hadoopauth/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-hadoopauth/pom.xml b/gateway-provider-security-hadoopauth/pom.xml
index ff04cef..8240dde 100755
--- a/gateway-provider-security-hadoopauth/pom.xml
+++ b/gateway-provider-security-hadoopauth/pom.xml
@@ -45,10 +45,12 @@
<artifactId>gateway-util-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>org.apache.hadoop</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-jwt/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/pom.xml b/gateway-provider-security-jwt/pom.xml
index e720ba2..ef0727e 100644
--- a/gateway-provider-security-jwt/pom.xml
+++ b/gateway-provider-security-jwt/pom.xml
@@ -46,10 +46,12 @@
<artifactId>gateway-util-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>commons-io</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-picketlink/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/pom.xml b/gateway-provider-security-picketlink/pom.xml
index 1cf4eab..b20e93c 100644
--- a/gateway-provider-security-picketlink/pom.xml
+++ b/gateway-provider-security-picketlink/pom.xml
@@ -45,10 +45,12 @@
<artifactId>gateway-util-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>org.picketlink</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-preauth/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-preauth/pom.xml b/gateway-provider-security-preauth/pom.xml
index 6c238c9..cabe506 100644
--- a/gateway-provider-security-preauth/pom.xml
+++ b/gateway-provider-security-preauth/pom.xml
@@ -45,10 +45,12 @@
<artifactId>gateway-util-common</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>junit</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-shiro/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/pom.xml b/gateway-provider-security-shiro/pom.xml
index 7ec2da1..763e9ce 100644
--- a/gateway-provider-security-shiro/pom.xml
+++ b/gateway-provider-security-shiro/pom.xml
@@ -55,10 +55,12 @@
<artifactId>shrinkwrap-descriptors-api-javaee</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>org.apache.shiro</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-provider-security-webappsec/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/pom.xml b/gateway-provider-security-webappsec/pom.xml
index 01669e8..67c4b2d 100644
--- a/gateway-provider-security-webappsec/pom.xml
+++ b/gateway-provider-security-webappsec/pom.xml
@@ -50,10 +50,12 @@
<artifactId>cors-filter</artifactId>
</dependency>
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>junit</groupId>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-release/home/data/applications/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/data/applications/README b/gateway-release/home/data/applications/README
new file mode 100644
index 0000000..149839b
--- /dev/null
+++ b/gateway-release/home/data/applications/README
@@ -0,0 +1 @@
+THIS DIRECTORY CAN CONTAIN APPLICATIONS THAT CAN BE REFERENCED FROM TOPOLOGIES.
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index c3cb2af..f02d169 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -232,6 +232,10 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
+ <artifactId>gateway-provider-security-authc-anon</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-security-authz-acls</artifactId>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 516ef23..b3df024 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -64,10 +64,12 @@
<artifactId>javax.servlet-api</artifactId>
</dependency>
-->
+ <!--
<dependency>
<groupId>org.eclipse.jetty.orbit</groupId>
<artifactId>javax.servlet</artifactId>
</dependency>
+ -->
<dependency>
<groupId>org.eclipse.persistence</groupId>
@@ -190,6 +192,15 @@
<groupId>org.apache.knox</groupId>
<artifactId>gateway-server-xforwarded-filter</artifactId>
</dependency>
+ <dependency>
+ <groupId>net.lingala.zip4j</groupId>
+ <artifactId>zip4j</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>joda-time</groupId>
+ <artifactId>joda-time</artifactId>
+ </dependency>
+
<!-- ********** ********** ********** ********** ********** ********** -->
<!-- ********** Test Dependencies ********** -->
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
index 98ad94e..da6ba61 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayFilter.java
@@ -89,30 +89,38 @@ public class GatewayFilter implements Filter {
@Override
public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
doFilter( servletRequest, servletResponse );
+ if( filterChain != null ) {
+ filterChain.doFilter( servletRequest, servletResponse );
+ }
}
- @SuppressWarnings("unckecked")
+ @SuppressWarnings("unchecked")
public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse ) throws IOException, ServletException {
HttpServletRequest httpRequest = (HttpServletRequest)servletRequest;
HttpServletResponse httpResponse = (HttpServletResponse)servletResponse;
//TODO: The resulting pathInfo + query needs to be added to the servlet context somehow so that filters don't need to rebuild it. This is done in HttpClientDispatch right now for example.
+ String servlet = httpRequest.getServletPath();
+ String path = httpRequest.getPathInfo();
String query = httpRequest.getQueryString();
- String path = httpRequest.getPathInfo() + ( query == null ? "" : "?" + query );
+ String requestPath = ( servlet == null ? "" : servlet ) + ( path == null ? "" : path );
+ String requestPathWithQuery = requestPath + ( query == null ? "" : "?" + query );
- Template pathTemplate;
+ Template pathWithQueryTemplate;
try {
- pathTemplate = Parser.parseLiteral( path );
+ pathWithQueryTemplate = Parser.parseLiteral( requestPathWithQuery );
} catch( URISyntaxException e ) {
throw new ServletException( e );
}
- String pathWithContext = httpRequest.getContextPath() + path;
- LOG.receivedRequest( httpRequest.getMethod(), pathTemplate );
+ String contextWithPathAndQuery = httpRequest.getContextPath() + requestPathWithQuery;
+ LOG.receivedRequest( httpRequest.getMethod(), requestPath );
- servletRequest.setAttribute( AbstractGatewayFilter.SOURCE_REQUEST_URL_ATTRIBUTE_NAME, pathTemplate );
- servletRequest.setAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME, pathWithContext );
+ servletRequest.setAttribute(
+ AbstractGatewayFilter.SOURCE_REQUEST_URL_ATTRIBUTE_NAME, pathWithQueryTemplate );
+ servletRequest.setAttribute(
+ AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME, contextWithPathAndQuery );
- Matcher<Chain>.Match match = chains.match( pathTemplate );
+ Matcher<Chain>.Match match = chains.match( pathWithQueryTemplate );
assignCorrelationRequestId();
// Populate Audit/correlation parameters
@@ -120,7 +128,9 @@ public class GatewayFilter implements Filter {
auditContext.setTargetServiceName( match == null ? null : match.getValue().getResourceRole() );
auditContext.setRemoteIp( servletRequest.getRemoteAddr() );
auditContext.setRemoteHostname( servletRequest.getRemoteHost() );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.UNAVAILABLE, RES.requestMethod(((HttpServletRequest)servletRequest).getMethod()));
+ auditor.audit(
+ Action.ACCESS, contextWithPathAndQuery, ResourceType.URI,
+ ActionOutcome.UNAVAILABLE, RES.requestMethod(((HttpServletRequest)servletRequest).getMethod()));
if( match != null ) {
Chain chain = match.getValue();
@@ -129,27 +139,27 @@ public class GatewayFilter implements Filter {
chain.doFilter( servletRequest, servletResponse );
} catch( IOException e ) {
LOG.failedToExecuteFilter( e );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.FAILURE );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
throw e;
} catch( ServletException e ) {
LOG.failedToExecuteFilter( e );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.FAILURE );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
throw e;
} catch( RuntimeException e ) {
LOG.failedToExecuteFilter( e );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.FAILURE );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
throw e;
} catch( ThreadDeath e ) {
LOG.failedToExecuteFilter( e );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.FAILURE );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
throw e;
} catch( Throwable e ) {
LOG.failedToExecuteFilter( e );
- auditor.audit( Action.ACCESS, pathWithContext, ResourceType.URI, ActionOutcome.FAILURE );
+ auditor.audit( Action.ACCESS, contextWithPathAndQuery, ResourceType.URI, ActionOutcome.FAILURE );
throw new ServletException( e );
}
} else {
- LOG.failedToMatchPath( path );
+ LOG.failedToMatchPath( requestPath );
httpResponse.setStatus( HttpServletResponse.SC_NOT_FOUND );
}
//KAM[ Don't do this or the Jetty default servlet will overwrite any response setup by the filter.
@@ -180,7 +190,7 @@ public class GatewayFilter implements Filter {
addHolder( holder );
}
-// public void addFilter( String path, String name, Class<WarDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
+// public void addFilter( String path, String name, Class<RegexDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
// Holder holder = new Holder( path, name, clazz, params );
// addHolder( holder );
// }
@@ -262,7 +272,7 @@ public class GatewayFilter implements Filter {
this.resourceRole = resourceRole;
}
-// private Holder( String path, String name, Class<WarDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
+// private Holder( String path, String name, Class<RegexDirFilter> clazz, Map<String,String> params ) throws URISyntaxException {
// this.path = path;
// this.template = Parser.parse( path );
// this.name = name;
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
index 7f68601..8ac83b2 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayMessages.java
@@ -89,6 +89,15 @@ public interface GatewayMessages {
@Message( level = MessageLevel.DEBUG, text = "Redeployed topology {0}." )
void redeployedTopology( String clusterName );
+ @Message( level = MessageLevel.INFO, text = "Activating topology {0}" )
+ void activatingTopology( String name );
+
+ @Message( level = MessageLevel.INFO, text = "Activating topology {0} archive {1}" )
+ void activatingTopologyArchive( String topology, String archive );
+
+ @Message( level = MessageLevel.INFO, text = "Deactivating topology {0}" )
+ void deactivatingTopology( String name );
+
@Message( level = MessageLevel.ERROR, text = "Failed to deploy topology {0}: {1}" )
void failedToDeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Throwable e );
@@ -107,9 +116,18 @@ public interface GatewayMessages {
@Message( level = MessageLevel.ERROR, text = "Failed to undeploy topology {0}: {1}" )
void failedToUndeployTopology( String name, @StackTrace(level=MessageLevel.DEBUG) Exception e );
+ @Message( level = MessageLevel.INFO, text = "Deleting topology {0}" )
+ void deletingTopology( String topologyName );
+
@Message( level = MessageLevel.INFO, text = "Deleting deployed topology {0}" )
void deletingDeployment( String warDirName );
+ @Message( level = MessageLevel.DEBUG, text = "Purge backups of deployed topology {0}" )
+ void cleanupDeployments( String topologyName );
+
+ @Message( level = MessageLevel.INFO, text = "Deleting backup deployed topology {0}" )
+ void cleanupDeployment( String absolutePath );
+
@Message( level = MessageLevel.INFO, text = "Creating gateway home directory: {0}" )
void creatingGatewayHomeDir( File homeDir );
@@ -162,7 +180,7 @@ public interface GatewayMessages {
void credentialStoreForClusterFoundNotCreating(String clusterName);
@Message( level = MessageLevel.DEBUG, text = "Received request: {0} {1}" )
- void receivedRequest( String method, Template uri );
+ void receivedRequest( String method, String uri );
@Message( level = MessageLevel.DEBUG, text = "Dispatch request: {0} {1}" )
void dispatchRequest( String method, URI uri );
@@ -350,7 +368,7 @@ public interface GatewayMessages {
@Message( level = MessageLevel.DEBUG, text = "Finalize service: {1}/{0}" )
void finalizeService( String name, String role );
- @Message( level = MessageLevel.INFO, text = "Configured services directory is {0}" )
+ @Message( level = MessageLevel.DEBUG, text = "Configured services directory is {0}" )
void usingServicesDirectory(String path);
@Message( level = MessageLevel.ERROR, text = "Failed to unmarshall service definition file {0} file : {1}" )
@@ -376,4 +394,19 @@ public interface GatewayMessages {
@Message( level = MessageLevel.ERROR, text = "Unable to get password: {0}" )
void unableToGetPassword(@StackTrace( level = MessageLevel.DEBUG ) Exception e);
+
+ @Message( level = MessageLevel.DEBUG, text = "Initialize application: {0}" )
+ void initializeApplication( String name );
+
+ @Message( level = MessageLevel.DEBUG, text = "Contribute application: {0}" )
+ void contributeApplication( String name );
+
+ @Message( level = MessageLevel.DEBUG, text = "Finalize application: {0}" )
+ void finalizeApplication( String name );
+
+ @Message( level = MessageLevel.INFO, text = "Default topology {0} at {1}" )
+ void defaultTopologySetup( String defaultTopologyName, String redirectContext );
+
+ @Message( level = MessageLevel.DEBUG, text = "Default topology forward from {0} to {1}" )
+ void defaultTopologyForward( String oldTarget, String newTarget );
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
index 21fb9ea..a6943b6 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServer.java
@@ -17,10 +17,34 @@
*/
package org.apache.hadoop.gateway;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.ServiceLoader;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Pattern;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.TransformerException;
+
+import net.lingala.zip4j.core.ZipFile;
+import net.lingala.zip4j.exception.ZipException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
import org.apache.hadoop.gateway.audit.api.Action;
import org.apache.hadoop.gateway.audit.api.ActionOutcome;
import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
@@ -29,20 +53,25 @@ import org.apache.hadoop.gateway.audit.api.ResourceType;
import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
+import org.apache.hadoop.gateway.deploy.DeploymentException;
import org.apache.hadoop.gateway.deploy.DeploymentFactory;
import org.apache.hadoop.gateway.filter.CorrelationHandler;
+import org.apache.hadoop.gateway.filter.DefaultTopologyHandler;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.registry.ServiceRegistry;
import org.apache.hadoop.gateway.services.security.SSLService;
import org.apache.hadoop.gateway.services.topology.TopologyService;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Topology;
import org.apache.hadoop.gateway.topology.TopologyEvent;
import org.apache.hadoop.gateway.topology.TopologyListener;
import org.apache.hadoop.gateway.trace.AccessHandler;
import org.apache.hadoop.gateway.trace.ErrorHandler;
import org.apache.hadoop.gateway.trace.TraceHandler;
+import org.apache.hadoop.gateway.util.Urls;
+import org.apache.hadoop.gateway.util.XmlUtils;
import org.apache.log4j.PropertyConfigurator;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
@@ -54,27 +83,12 @@ import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.webapp.WebAppContext;
import org.jboss.shrinkwrap.api.exporter.ExplodedExporter;
-import org.jboss.shrinkwrap.api.spec.WebArchive;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.security.ProviderException;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.ServiceLoader;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Pattern;
+import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
public class GatewayServer {
private static GatewayResources res = ResourcesFactory.get(GatewayResources.class);
@@ -220,21 +234,39 @@ public class GatewayServer {
return properties;
}
- private static void extractToFile( String resource, File file ) throws IOException {
- InputStream input = ClassLoader.getSystemResourceAsStream( resource );
- OutputStream output = new FileOutputStream( file );
- IOUtils.copy( input, output );
- output.close();
- input.close();
- }
-
-
public static void redeployTopologies( String topologyName ) {
TopologyService ts = getGatewayServices().getService(GatewayServices.TOPOLOGY_SERVICE);
ts.reloadTopologies();
ts.redeployTopologies(topologyName);
}
+ private void cleanupTopologyDeployments() {
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ TopologyService ts = getGatewayServices().getService(GatewayServices.TOPOLOGY_SERVICE);
+ for( Topology topology : ts.getTopologies() ) {
+ cleanupTopologyDeployments( deployDir, topology );
+ }
+ }
+
+ private void cleanupTopologyDeployments( File deployDir, Topology topology ) {
+ log.cleanupDeployments( topology.getName() );
+ File[] files = deployDir.listFiles( new RegexDirFilter( topology.getName() + "\\.(war|topo)\\.[0-9A-Fa-f]+" ) );
+ if( files != null ) {
+ Arrays.sort( files, new FileModificationTimeDescendingComparator() );
+ int verLimit = config.getGatewayDeploymentsBackupVersionLimit();
+ long ageLimit = config.getGatewayDeploymentsBackupAgeLimit();
+ long keepTime = System.currentTimeMillis() - ageLimit;
+ for( int i=1; i<files.length; i++ ) {
+ File file = files[i];
+ if( ( ( verLimit >= 0 ) && ( i > verLimit ) ) ||
+ ( ( ageLimit >= 0 ) && ( file.lastModified() < keepTime ) ) ) {
+ log.cleanupDeployment( file.getAbsolutePath() );
+ FileUtils.deleteQuietly( file );
+ }
+ }
+ }
+ }
+
public static GatewayServer startGateway( GatewayConfig config, GatewayServices svcs ) throws Exception {
log.startingGateway();
server = new GatewayServer( config );
@@ -287,7 +319,10 @@ public class GatewayServer {
return connector;
}
- private static HandlerCollection createHandlers( final GatewayConfig config, final ContextHandlerCollection contexts ) {
+ private static HandlerCollection createHandlers(
+ final GatewayConfig config,
+ final GatewayServices services,
+ final ContextHandlerCollection contexts ) {
HandlerCollection handlers = new HandlerCollection();
RequestLogHandler logHandler = new RequestLogHandler();
logHandler.setRequestLog( new AccessHandler() );
@@ -299,11 +334,16 @@ public class GatewayServer {
CorrelationHandler correlationHandler = new CorrelationHandler();
correlationHandler.setHandler( traceHandler );
- handlers.setHandlers( new Handler[]{ correlationHandler, logHandler } );
+ DefaultTopologyHandler defaultTopoHandler = new DefaultTopologyHandler( config, services, contexts );
+
+ handlers.setHandlers( new Handler[]{ correlationHandler, defaultTopoHandler, logHandler } );
return handlers;
}
private synchronized void start() throws Exception {
+ errorHandler = new ErrorHandler();
+ errorHandler.setShowStacks( false );
+ errorHandler.setTracedBodyFilter( System.getProperty( "org.apache.knox.gateway.trace.body.status.filter" ) );
// Create the global context handler.
contexts = new ContextHandlerCollection();
@@ -313,9 +353,16 @@ public class GatewayServer {
// Start Jetty.
jetty = new Server();
jetty.addConnector( createConnector( config ) );
- jetty.setHandler( createHandlers( config, contexts ) );
+ jetty.setHandler( createHandlers( config, services, contexts ) );
jetty.setThreadPool( new QueuedThreadPool( config.getThreadPoolMax() ) );
+ // Load the current topologies.
+ File topologiesDir = calculateAbsoluteTopologiesDir();
+ log.loadingTopologiesFromDirectory(topologiesDir.getAbsolutePath());
+ monitor = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+ monitor.addTopologyChangeListener(listener);
+ monitor.reloadTopologies();
+
try {
jetty.start();
}
@@ -324,14 +371,7 @@ public class GatewayServer {
throw e;
}
- // Create a dir/file based cluster topology provider.
- File topologiesDir = calculateAbsoluteTopologiesDir();
- monitor = services.getService(GatewayServices.TOPOLOGY_SERVICE);
- monitor.addTopologyChangeListener(listener);
-
- // Load the current topologies.
- log.loadingTopologiesFromDirectory(topologiesDir.getAbsolutePath());
- monitor.reloadTopologies();
+ cleanupTopologyDeployments();
// Start the topology monitor.
log.monitoringTopologyChangesInDirectory(topologiesDir.getAbsolutePath());
@@ -361,53 +401,158 @@ public class GatewayServer {
return addresses;
}
- private synchronized void internalDeploy( Topology topology, File warFile ) {
- String name = topology.getName();
- String warPath = warFile.getAbsolutePath();
- errorHandler = new ErrorHandler();
- errorHandler.setShowStacks(false);
- errorHandler.setTracedBodyFilter( System.getProperty( "org.apache.knox.gateway.trace.body.status.filter" ) );
+ private WebAppContext createWebAppContext( Topology topology, File warFile, String warPath ) throws IOException, ZipException, TransformerException, SAXException, ParserConfigurationException {
+ String topoName = topology.getName();
WebAppContext context = new WebAppContext();
- context.setDefaultsDescriptor( null );
- if (!name.equals("_default")) {
- context.setContextPath( "/" + config.getGatewayPath() + "/" + name );
- }
- else {
- context.setContextPath( "/" );
- }
- context.setWar( warPath );
- context.setErrorHandler(errorHandler);
- // internalUndeploy( topology ); KNOX-152
- context.setAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE, name );
+ String contextPath;
+ contextPath = "/" + Urls.trimLeadingAndTrailingSlashJoin( config.getGatewayPath(), topoName, warPath );
+ context.setContextPath( contextPath );
+ context.setWar( warFile.getAbsolutePath() );
+ context.setAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE, topoName );
context.setAttribute( "org.apache.knox.gateway.frontend.uri", getFrontendUri( context, config ) );
context.setAttribute( GatewayConfig.GATEWAY_CONFIG_ATTRIBUTE, config );
- deployments.put( name, context );
- contexts.addHandler( context );
- try {
- context.start();
- } catch( Exception e ) {
- auditor
- .audit(Action.DEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
- log.failedToDeployTopology( name, e );
+ context.setTempDirectory( warFile );
+ context.setErrorHandler( errorHandler );
+ return context;
+ }
+
+ private static void explodeWar( File source, File target ) throws IOException, ZipException {
+ if( source.isDirectory() ) {
+ FileUtils.copyDirectory( source, target );
+ } else {
+ ZipFile zip = new ZipFile( source );
+ zip.extractAll( target.getAbsolutePath() );
+ }
+ }
+
+ private void mergeWebXmlOverrides( File webInfDir ) throws IOException, SAXException, ParserConfigurationException, TransformerException {
+ File webXmlFile = new File( webInfDir, "web.xml" );
+ Document webXmlDoc;
+ if( webXmlFile.exists() ) {
+ // Backup original web.xml file.
+ File originalWebXmlFile = new File( webInfDir, "original-web.xml" );
+ FileUtils.copyFile( webXmlFile, originalWebXmlFile );
+ webXmlDoc = XmlUtils.readXml( webXmlFile );
+ } else {
+ webXmlDoc = XmlUtils.createDocument();
+ webXmlDoc.appendChild( webXmlDoc.createElement( "web-app" ) );
+ }
+ File overrideWebXmlFile = new File( webInfDir, "override-web.xml" );
+ if( overrideWebXmlFile.exists() ) {
+ Document overrideWebXmlDoc = XmlUtils.readXml( overrideWebXmlFile );
+ Element originalRoot = webXmlDoc.getDocumentElement();
+ Element overrideRoot = overrideWebXmlDoc.getDocumentElement();
+ NodeList overrideNodes = overrideRoot.getChildNodes();
+ for( int i = 0, n = overrideNodes.getLength(); i < n; i++ ) {
+ Node overrideNode = overrideNodes.item( i );
+ if( overrideNode.getNodeType() == Node.ELEMENT_NODE ) {
+ Node importedNode = webXmlDoc.importNode( overrideNode, true );
+ originalRoot.appendChild( importedNode );
+ }
+ }
+ XmlUtils.writeXml( webXmlDoc, webXmlFile );
+ }
+ }
+
+ private synchronized void internalDeployApplications( Topology topology, File topoDir ) throws IOException, ZipException, ParserConfigurationException, TransformerException, SAXException {
+ if( topology != null ) {
+ Collection<Application> applications = topology.getApplications();
+ if( applications != null ) {
+ for( Application application : applications ) {
+ List<String> urls = application.getUrls();
+ if( urls == null || urls.isEmpty() ) {
+ internalDeployApplication( topology, topoDir, application, application.getName() );
+ } else {
+ for( String url : urls ) {
+ internalDeployApplication( topology, topoDir, application, url );
+ }
+ }
+ }
+ }
+ }
+ }
+
+ private synchronized void internalDeployApplication( Topology topology, File topoDir, Application application, String url ) throws IOException, ZipException, TransformerException, SAXException, ParserConfigurationException {
+ File appsDir = new File( config.getGatewayApplicationsDir() );
+ File appDir = new File( appsDir, application.getName() );
+ if( !appDir.exists() ) {
+ appDir = new File( appsDir, application.getName() + ".war" );
+ }
+ if( !appDir.exists() ) {
+ throw new DeploymentException( "Application archive does not exist: " + appDir.getAbsolutePath() );
+ }
+ File warFile = new File( topoDir, Urls.encode( "/" + Urls.trimLeadingAndTrailingSlash( url ) ) );
+ File webInfDir = new File( warFile, "WEB-INF" );
+ explodeWar( appDir, warFile );
+ mergeWebXmlOverrides( webInfDir );
+ }
+
+ private synchronized void internalActivateTopology( Topology topology, File topoDir ) throws IOException, ZipException, ParserConfigurationException, TransformerException, SAXException {
+ log.activatingTopology( topology.getName() );
+ File[] files = topoDir.listFiles( new RegexDirFilter( "%.*" ) );
+ if( files != null ) {
+ for( File file : files ) {
+ internalActivateArchive( topology, file );
+ }
}
}
- private synchronized void internalUndeploy( Topology topology ) {
- WebAppContext context = deployments.remove( topology.getName() );
- if( context != null ) {
- ServiceRegistry sr = getGatewayServices().getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
- if (sr != null) {
- sr.removeClusterServices(topology.getName());
+ private synchronized void internalActivateArchive( Topology topology, File warDir ) throws IOException, ZipException, ParserConfigurationException, TransformerException, SAXException {
+ log.activatingTopologyArchive( topology.getName(), warDir.getName() );
+ WebAppContext newContext = createWebAppContext( topology, warDir, Urls.decode( warDir.getName() ) );
+ WebAppContext oldContext = deployments.get( newContext.getContextPath() );
+ deployments.put( newContext.getContextPath(), newContext );
+ if( oldContext != null ) {
+ contexts.removeHandler( oldContext );
+ }
+ contexts.addHandler( newContext );
+ if( contexts.isRunning() ) {
+ try {
+ newContext.start();
+ } catch( Exception e ) {
+ auditor.audit( Action.DEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE );
+ log.failedToDeployTopology( topology.getName(), e );
+ }
+ }
+ }
+
+ private synchronized void internalDeactivateTopology( Topology topology ) {
+
+ log.deactivatingTopology( topology.getName() );
+
+ String topoName = topology.getName();
+ String topoPath = "/" + Urls.trimLeadingAndTrailingSlashJoin( config.getGatewayPath(), topoName );
+ String topoPathSlash = topoPath + "/";
+
+ ServiceRegistry sr = getGatewayServices().getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
+ if (sr != null) {
+ sr.removeClusterServices( topoName );
+ }
+
+ // Find all the deployed contexts we need to deactivate.
+ List<WebAppContext> deactivate = new ArrayList<WebAppContext>();
+ if( deployments != null ) {
+ for( WebAppContext app : deployments.values() ) {
+ String appPath = app.getContextPath();
+ if( appPath.equals( topoPath ) || appPath.startsWith( topoPathSlash ) ) {
+ deactivate.add( app );
+ }
}
- contexts.removeHandler( context ) ;
+ }
+ // Deactivate the required deployed contexts.
+ for( WebAppContext context : deactivate ) {
+ String contextPath = context.getContextPath();
+ deployments.remove( contextPath );
+ contexts.removeHandler( context );
try {
context.stop();
} catch( Exception e ) {
- auditor.audit(Action.UNDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
- ActionOutcome.FAILURE);
+ auditor.audit(Action.UNDEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE);
log.failedToUndeployTopology( topology.getName(), e );
}
}
+ deactivate.clear();
+
}
// Using an inner class to hide the handleTopologyEvent method from consumers of GatewayServer.
@@ -429,13 +574,14 @@ public class GatewayServer {
}
private void handleDeleteDeployment(Topology topology, File deployDir) {
- File[] files = deployDir.listFiles( new WarDirFilter( topology.getName() + "\\.war\\.[0-9A-Fa-f]+" ) );
+ log.deletingTopology( topology.getName() );
+ File[] files = deployDir.listFiles( new RegexDirFilter( topology.getName() + "\\.(war|topo)\\.[0-9A-Fa-f]+" ) );
if( files != null ) {
auditor.audit(Action.UNDEPLOY, topology.getName(), ResourceType.TOPOLOGY,
ActionOutcome.UNAVAILABLE);
+ internalDeactivateTopology( topology );
for( File file : files ) {
log.deletingDeployment( file.getAbsolutePath() );
- internalUndeploy( topology );
FileUtils.deleteQuietly( file );
}
}
@@ -443,48 +589,47 @@ public class GatewayServer {
private void handleCreateDeployment(Topology topology, File deployDir) {
try {
- File warDir = calculateDeploymentDir( topology );
- if( !warDir.exists() ) {
+ File topoDir = calculateDeploymentDir( topology );
+ if( !topoDir.exists() ) {
auditor.audit( Action.DEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.UNAVAILABLE );
// KNOX-564 - Topology should fail to deploy with no providers configured.
+//TODO:APPS:This should only fail if there are services in the topology.
if(topology.getProviders().isEmpty()) {
- throw new ProviderException("No providers found inside topology.");
+ throw new DeploymentException("No providers found inside topology.");
}
- log.deployingTopology( topology.getName(), warDir.getAbsolutePath() );
- internalUndeploy( topology ); // KNOX-152
- WebArchive war = null;
- war = DeploymentFactory.createDeployment( config, topology );
+ log.deployingTopology( topology.getName(), topoDir.getAbsolutePath() );
+ internalDeactivateTopology( topology ); // KNOX-152
+
+ EnterpriseArchive ear = DeploymentFactory.createDeployment( config, topology );
if( !deployDir.exists() ) {
deployDir.mkdirs();
+ if( !deployDir.exists() ) {
+ throw new DeploymentException( "Failed to create topology deployment temporary directory: " + deployDir.getAbsolutePath() );
+ }
+ }
+ File tmp = ear.as( ExplodedExporter.class ).exportExploded( deployDir, topoDir.getName() + ".tmp" );
+ if( !tmp.renameTo( topoDir ) ) {
+ FileUtils.deleteQuietly( tmp );
+ throw new DeploymentException( "Failed to create topology deployment directory: " + topoDir.getAbsolutePath() );
}
- File tmp = war.as( ExplodedExporter.class ).exportExploded( deployDir, warDir.getName() + ".tmp" );
- tmp.renameTo( warDir );
- internalDeploy( topology, warDir );
- handleDefaultTopology(topology, deployDir);
+ internalDeployApplications( topology, topoDir );
+ internalActivateTopology( topology, topoDir );
log.deployedTopology( topology.getName());
} else {
auditor.audit( Action.REDEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.UNAVAILABLE );
- log.redeployingTopology( topology.getName(), warDir.getAbsolutePath() );
- internalDeploy( topology, warDir );
- handleDefaultTopology(topology, deployDir);
+ log.redeployingTopology( topology.getName(), topoDir.getAbsolutePath() );
+ internalActivateTopology( topology, topoDir );
log.redeployedTopology( topology.getName() );
}
+ cleanupTopologyDeployments( deployDir, topology );
} catch( Throwable e ) {
auditor.audit( Action.DEPLOY, topology.getName(), ResourceType.TOPOLOGY, ActionOutcome.FAILURE );
log.failedToDeployTopology( topology.getName(), e );
}
}
- public void handleDefaultTopology(Topology topology, File deployDir) {
- if (topology.getName().equals(config.getDefaultTopologyName())) {
- topology.setName("_default");
- handleCreateDeployment(topology, deployDir);
- topology.setName(config.getDefaultTopologyName());
- }
- }
-
}
private static File calculateAbsoluteTopologiesDir( GatewayConfig config ) {
@@ -508,12 +653,16 @@ public class GatewayServer {
}
private File calculateDeploymentDir( Topology topology ) {
- File warDir = new File( calculateAbsoluteDeploymentsDir(), calculateDeploymentName( topology ) );
- return warDir;
+ File dir = new File( calculateAbsoluteDeploymentsDir(), calculateDeploymentName( topology ) );
+ return dir;
+ }
+
+ private String calculateDeploymentExtension( Topology topology ) {
+ return ".topo.";
}
private String calculateDeploymentName( Topology topology ) {
- String name = topology.getName() + ".war." + Long.toHexString( topology.getTimestamp() );
+ String name = topology.getName() + calculateDeploymentExtension( topology ) + Long.toHexString( topology.getTimestamp() );
return name;
}
@@ -523,11 +672,11 @@ public class GatewayServer {
socket.close();
}
- private class WarDirFilter implements FilenameFilter {
+ private class RegexDirFilter implements FilenameFilter {
Pattern pattern;
- WarDirFilter( String regex ) {
+ RegexDirFilter( String regex ) {
pattern = Pattern.compile( regex );
}
@@ -556,4 +705,19 @@ public class GatewayServer {
return frontendUri;
}
+ private static class FileModificationTimeDescendingComparator implements Comparator<File> {
+ @Override
+ public int compare( File left, File right ) {
+ long leftTime = ( left == null ? Long.MIN_VALUE : left.lastModified() );
+ long rightTime = ( right == null ? Long.MIN_VALUE : right.lastModified() );
+ if( leftTime > rightTime ) {
+ return -1;
+ } else if ( leftTime < rightTime ) {
+ return 1;
+ } else {
+ return 0;
+ }
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServlet.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServlet.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServlet.java
index 6eea100..cb9f7a5 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServlet.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/GatewayServlet.java
@@ -17,6 +17,22 @@
*/
package org.apache.hadoop.gateway;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.URISyntaxException;
+import java.util.Enumeration;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.Servlet;
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletResponse;
+
import org.apache.hadoop.gateway.audit.api.Action;
import org.apache.hadoop.gateway.audit.api.ActionOutcome;
import org.apache.hadoop.gateway.audit.api.AuditService;
@@ -29,25 +45,8 @@ import org.apache.hadoop.gateway.descriptor.GatewayDescriptorFactory;
import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.i18n.resources.ResourcesFactory;
-import org.apache.hadoop.gateway.services.GatewayServices;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterConfig;
-import javax.servlet.Servlet;
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.URISyntaxException;
-import java.util.Enumeration;
-
-public class GatewayServlet implements Servlet {
+public class GatewayServlet implements Servlet, Filter {
public static final String GATEWAY_DESCRIPTOR_LOCATION_DEFAULT = "gateway.xml";
public static final String GATEWAY_DESCRIPTOR_LOCATION_PARAM = "gatewayDescriptorLocation";
@@ -107,6 +106,24 @@ public class GatewayServlet implements Servlet {
}
@Override
+ public void init( FilterConfig filterConfig ) throws ServletException {
+ try {
+ if( filter == null ) {
+ filter = createFilter( filterConfig );
+ }
+ if( filter != null ) {
+ filter.init( filterConfig );
+ }
+ } catch( ServletException e ) {
+ LOG.failedToInitializeServletInstace( e );
+ throw e;
+ } catch( RuntimeException e ) {
+ LOG.failedToInitializeServletInstace( e );
+ throw e;
+ }
+ }
+
+ @Override
public ServletConfig getServletConfig() {
return filterConfig.getServletConfig();
}
@@ -118,7 +135,38 @@ public class GatewayServlet implements Servlet {
GatewayFilter f = filter;
if( f != null ) {
try {
+ f.doFilter( servletRequest, servletResponse, null );
+ } catch( IOException e ) {
+ LOG.failedToExecuteFilter( e );
+ throw e;
+ } catch( ServletException e ) {
+ LOG.failedToExecuteFilter( e );
+ throw e;
+ } catch( RuntimeException e ) {
+ LOG.failedToExecuteFilter( e );
+ throw e;
+ }
+ } else {
+ ((HttpServletResponse)servletResponse).setStatus( HttpServletResponse.SC_SERVICE_UNAVAILABLE );
+ }
+ String requestUri = (String)servletRequest.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
+ int status = ((HttpServletResponse)servletResponse).getStatus();
+ auditor.audit( Action.ACCESS, requestUri, ResourceType.URI, ActionOutcome.SUCCESS, res.responseStatus( status ) );
+ } finally {
+ auditService.detachContext();
+ }
+ }
+
+ @Override
+ public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain ) throws IOException, ServletException {
+ try {
+ auditService.createContext();
+ GatewayFilter f = filter;
+ if( f != null ) {
+ try {
f.doFilter( servletRequest, servletResponse );
+ //TODO: This should really happen naturally somehow as part of being a filter. This way will cause problems eventually.
+ chain.doFilter( servletRequest, servletResponse );
} catch( IOException e ) {
LOG.failedToExecuteFilter( e );
throw e;
@@ -153,19 +201,9 @@ public class GatewayServlet implements Servlet {
filter = null;
}
- private static GatewayFilter createFilter( ServletConfig servletConfig ) throws ServletException {
- GatewayFilter filter = null;
+ private static GatewayFilter createFilter( InputStream stream ) throws ServletException {
try {
- InputStream stream = null;
- String location = servletConfig.getInitParameter( GATEWAY_DESCRIPTOR_LOCATION_PARAM );
- if( location != null ) {
- stream = servletConfig.getServletContext().getResourceAsStream( location );
- if( stream == null ) {
- stream = servletConfig.getServletContext().getResourceAsStream( "/WEB-INF/" + location );
- }
- } else {
- stream = servletConfig.getServletContext().getResourceAsStream( GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
- }
+ GatewayFilter filter = null;
if( stream != null ) {
try {
GatewayDescriptor descriptor = GatewayDescriptorFactory.load( "xml", new InputStreamReader( stream ) );
@@ -174,11 +212,43 @@ public class GatewayServlet implements Servlet {
stream.close();
}
}
+ return filter;
} catch( IOException e ) {
throw new ServletException( e );
} catch( URISyntaxException e ) {
throw new ServletException( e );
}
+ }
+
+ private static GatewayFilter createFilter( FilterConfig filterConfig ) throws ServletException {
+ GatewayFilter filter;
+ InputStream stream;
+ String location = filterConfig.getInitParameter( GATEWAY_DESCRIPTOR_LOCATION_PARAM );
+ if( location != null ) {
+ stream = filterConfig.getServletContext().getResourceAsStream( location );
+ if( stream == null ) {
+ stream = filterConfig.getServletContext().getResourceAsStream( "/WEB-INF/" + location );
+ }
+ } else {
+ stream = filterConfig.getServletContext().getResourceAsStream( GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
+ }
+ filter = createFilter( stream );
+ return filter;
+ }
+
+ private static GatewayFilter createFilter( ServletConfig servletConfig ) throws ServletException {
+ GatewayFilter filter;
+ InputStream stream;
+ String location = servletConfig.getInitParameter( GATEWAY_DESCRIPTOR_LOCATION_PARAM );
+ if( location != null ) {
+ stream = servletConfig.getServletContext().getResourceAsStream( location );
+ if( stream == null ) {
+ stream = servletConfig.getServletContext().getResourceAsStream( "/WEB-INF/" + location );
+ }
+ } else {
+ stream = servletConfig.getServletContext().getResourceAsStream( GATEWAY_DESCRIPTOR_LOCATION_DEFAULT );
+ }
+ filter = createFilter( stream );
return filter;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
index 6aa5418..cdaa96d 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/config/impl/GatewayConfigImpl.java
@@ -22,6 +22,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.config.GatewayConfig;
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.joda.time.Period;
+import org.joda.time.format.PeriodFormatter;
+import org.joda.time.format.PeriodFormatterBuilder;
import java.io.File;
import java.net.InetSocketAddress;
@@ -67,7 +70,7 @@ import java.util.Map;
public class GatewayConfigImpl extends Configuration implements GatewayConfig {
private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME_PARAM = "default.app.topology.name";
- private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME = "sandbox";
+ private static final String GATEWAY_DEFAULT_TOPOLOGY_NAME = null;
private static GatewayMessages log = MessagesFactory.get( GatewayMessages.class );
@@ -77,6 +80,8 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
private static final String DEFAULT_STACKS_SERVICES_DIR = "services";
+ private static final String DEFAULT_APPLICATIONS_DIR = "applications";
+
public static final String[] GATEWAY_CONFIG_FILENAMES = {
GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-default.xml",
GATEWAY_CONFIG_DIR_PREFIX + "/" + GATEWAY_CONFIG_FILE_PREFIX + "-site.xml"
@@ -103,6 +108,7 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
public static final String SECURITY_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".security.dir";
public static final String DATA_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".data.dir";
public static final String STACKS_SERVICES_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".services.dir";
+ public static final String APPLICATIONS_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".applications.dir";
public static final String HADOOP_CONF_DIR = GATEWAY_CONFIG_FILE_PREFIX + ".hadoop.conf.dir";
public static final String FRONTEND_URL = GATEWAY_CONFIG_FILE_PREFIX + ".frontend.url";
private static final String TRUST_ALL_CERTS = GATEWAY_CONFIG_FILE_PREFIX + ".trust.all.certs";
@@ -118,6 +124,8 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
public static final String HTTP_SERVER_REQUEST_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.requestHeaderBuffer";
public static final String HTTP_SERVER_RESPONSE_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseBuffer";
public static final String HTTP_SERVER_RESPONSE_HEADER_BUFFER = GATEWAY_CONFIG_FILE_PREFIX + ".httpserver.responseHeaderBuffer";
+ public static final String DEPLOYMENTS_BACKUP_VERSION_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.versionLimit";
+ public static final String DEPLOYMENTS_BACKUP_AGE_LIMIT = GATEWAY_CONFIG_FILE_PREFIX + ".deployment.backup.ageLimit";
// These config property names are not inline with the convention of using the
// GATEWAY_CONFIG_FILE_PREFIX as is done by those above. These are left for
@@ -190,6 +198,11 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
}
@Override
+ public String getGatewayApplicationsDir() {
+ return get(APPLICATIONS_DIR, getGatewayDataDir() + File.separator + DEFAULT_APPLICATIONS_DIR);
+ }
+
+ @Override
public String getHadoopConfDir() {
return get( HADOOP_CONF_DIR );
}
@@ -371,7 +384,12 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
*/
@Override
public String getDefaultAppRedirectPath() {
- return "/" + getGatewayPath() + "/" + getDefaultTopologyName();
+ String defTopo = getDefaultTopologyName();
+ if( defTopo == null ) {
+ return null;
+ } else {
+ return "/" + getGatewayPath() + "/" + defTopo;
+ }
}
/* (non-Javadoc)
@@ -497,4 +515,30 @@ public class GatewayConfigImpl extends Configuration implements GatewayConfig {
return i;
}
+ @Override
+ public int getGatewayDeploymentsBackupVersionLimit() {
+ int i = getInt( DEPLOYMENTS_BACKUP_VERSION_LIMIT, 5 );
+ if( i < 0 ) {
+ i = -1;
+ }
+ return i;
+ }
+
+ @Override
+ public long getGatewayDeploymentsBackupAgeLimit() {
+ PeriodFormatter f = new PeriodFormatterBuilder().appendDays().toFormatter();
+ String s = get( DEPLOYMENTS_BACKUP_AGE_LIMIT, "-1" );
+ long d;
+ try {
+ Period p = Period.parse( s, f );
+ d = p.toStandardDuration().getMillis();
+ if( d < 0 ) {
+ d = -1;
+ }
+ } catch( Exception e ) {
+ d = -1;
+ }
+ return d;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentContextImpl.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentContextImpl.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentContextImpl.java
index 59480ce..918cdbb 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentContextImpl.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/deploy/DeploymentContextImpl.java
@@ -39,7 +39,6 @@ public class DeploymentContextImpl implements DeploymentContext {
private WebArchive webArchive;
private WebAppDescriptor webAppDescriptor;
Map<String,List<ProviderDeploymentContributor>> providers;
- Map<String,List<ServiceDeploymentContributor>> services;
private Map<String,Object> descriptors;
public DeploymentContextImpl(
@@ -48,15 +47,13 @@ public class DeploymentContextImpl implements DeploymentContext {
GatewayDescriptor gatewayDescriptor,
WebArchive webArchive,
WebAppDescriptor webAppDescriptor,
- Map<String,List<ProviderDeploymentContributor>> providers,
- Map<String,List<ServiceDeploymentContributor>> services ) {
+ Map<String,List<ProviderDeploymentContributor>> providers ) {
this.gatewayConfig = gatewayConfig;
this.topology = topology;
this.gatewayDescriptor = gatewayDescriptor;
this.webArchive = webArchive;
this.webAppDescriptor = webAppDescriptor;
this.providers = providers;
- this.services = services;
this.descriptors = new HashMap<String,Object>();
}
@@ -118,9 +115,6 @@ public class DeploymentContextImpl implements DeploymentContext {
List<FilterParamDescriptor> params ) {
ProviderDeploymentContributor contributor = DeploymentFactory.getProviderContributor( providers, role, name );
Provider provider = getTopology().getProvider( role, name );
-// if( provider != null ) {
-// System.out.println("=================== provider found by name: " + name + " with actual name of: " + provider.getName());
-// }
if( provider == null ) {
provider = new Provider();
provider.setRole( role );
[2/5] knox git commit: [KNOX-670] - Knox should be able to sost
simple web apps
Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
new file mode 100644
index 0000000..52f5eed
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
@@ -0,0 +1,685 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import java.io.File;
+import java.net.URL;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.UUID;
+
+import com.jayway.restassured.RestAssured;
+import org.apache.commons.io.FileUtils;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.GatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.services.topology.TopologyService;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.hadoop.test.mock.MockServer;
+import org.apache.http.HttpStatus;
+import org.apache.log4j.Appender;
+import org.hamcrest.MatcherAssert;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.jayway.restassured.RestAssured.given;
+import static com.jayway.restassured.config.ConnectionConfig.connectionConfig;
+import static com.jayway.restassured.config.RestAssuredConfig.newConfig;
+import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
+import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.Matchers.arrayWithSize;
+import static org.hamcrest.Matchers.hasItemInArray;
+import static org.hamcrest.core.Is.is;
+import static org.hamcrest.core.IsNot.not;
+import static org.junit.Assert.assertThat;
+import static org.xmlmatchers.transform.XmlConverters.the;
+import static org.xmlmatchers.xpath.HasXPath.hasXPath;
+import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
+
+public class GatewayAppFuncTest {
+
+ private static Logger LOG = LoggerFactory.getLogger( GatewayAppFuncTest.class );
+ private static Class DAT = GatewayAppFuncTest.class;
+
+ private static Enumeration<Appender> appenders;
+ private static GatewayTestConfig config;
+ private static DefaultGatewayServices services;
+ private static GatewayServer gateway;
+ private static int gatewayPort;
+ private static String gatewayUrl;
+ private static String clusterUrl;
+ private static SimpleLdapDirectoryServer ldap;
+ private static TcpTransport ldapTransport;
+ private static int ldapPort;
+ private static Properties params;
+ private static TopologyService topos;
+ private static MockServer mockWebHdfs;
+
+ @BeforeClass
+ public static void setupSuite() throws Exception {
+ LOG_ENTER();
+ RestAssured.config = newConfig().connectionConfig(connectionConfig().closeIdleConnectionsAfterEachResponse());
+ //appenders = NoOpAppender.setUp();
+ setupLdap();
+ setupGateway();
+ LOG_EXIT();
+ }
+
+ @AfterClass
+ public static void cleanupSuite() throws Exception {
+ LOG_ENTER();
+ gateway.stop();
+ ldap.stop( true );
+ FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+ //NoOpAppender.tearDown( appenders );
+ LOG_EXIT();
+ }
+
+ @After
+ public void cleanupTest() throws Exception {
+ FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
+ FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
+ }
+
+ public static void setupLdap() throws Exception {
+ URL usersUrl = TestUtils.getResourceUrl( DAT, "users.ldif" );
+ ldapPort = TestUtils.findFreePort();
+ ldapTransport = new TcpTransport( ldapPort );
+ ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+ ldap.start();
+ LOG.info( "LDAP port = " + ldapTransport.getPort() );
+ }
+
+ public static void setupGateway() throws Exception {
+
+ File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+ File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+ gatewayDir.mkdirs();
+
+ config = new GatewayTestConfig();
+ config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+ URL svcsFileUrl = TestUtils.getResourceUrl( DAT, "test-svcs/readme.txt" );
+ File svcsFile = new File( svcsFileUrl.getFile() );
+ File svcsDir = svcsFile.getParentFile();
+ config.setGatewayServicesDir( svcsDir.getAbsolutePath() );
+
+ URL appsFileUrl = TestUtils.getResourceUrl( DAT, "test-apps/readme.txt" );
+ File appsFile = new File( appsFileUrl.getFile() );
+ File appsDir = appsFile.getParentFile();
+ config.setGatewayApplicationsDir( appsDir.getAbsolutePath() );
+
+ File topoDir = new File( config.getGatewayTopologyDir() );
+ topoDir.mkdirs();
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ deployDir.mkdirs();
+
+
+ setupMockServers();
+ startGatewayServer();
+ }
+
+ public static void setupMockServers() throws Exception {
+ mockWebHdfs = new MockServer( "WEBHDFS", true );
+ }
+
+ public static void startGatewayServer() throws Exception {
+ services = new DefaultGatewayServices();
+ Map<String,String> options = new HashMap<String,String>();
+ options.put( "persist-master", "false" );
+ options.put( "master", "password" );
+ try {
+ services.init( config, options );
+ } catch ( ServiceLifecycleException e ) {
+ e.printStackTrace(); // I18N not required.
+ }
+ topos = services.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+ gateway = GatewayServer.startGateway( config, services );
+ MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+ gatewayPort = gateway.getAddresses()[0].getPort();
+ gatewayUrl = "http://localhost:" + gatewayPort + "/" + config.getGatewayPath();
+ clusterUrl = gatewayUrl + "/test-topology";
+
+ LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+ params = new Properties();
+ params.put( "LDAP_URL", "ldap://localhost:" + ldapTransport.getPort() );
+ params.put( "WEBHDFS_URL", "http://localhost:" + mockWebHdfs.getPort() );
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testSimpleStaticHelloAppDeployUndeploy() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-static-hello-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ String username = "guest";
+ String password = "guest-password";
+ String serviceUrl = clusterUrl + "/static-hello-app-path/index.html";
+ String body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "text/html" )
+ .when().get( serviceUrl ).asString();
+ assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
+
+ serviceUrl = clusterUrl + "/static-hello-app-path/";
+ body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "text/html" )
+ .when().get( serviceUrl ).asString();
+ assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
+
+ serviceUrl = clusterUrl + "/static-hello-app-path";
+ body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "text/html" )
+ .when().get( serviceUrl ).asString();
+ assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
+
+ assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
+ topos.reloadTopologies();
+
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( serviceUrl );
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testSimpleDynamicAppDeployUndeploy() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ String username = "guest";
+ String password = "guest-password";
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
+ topos.reloadTopologies();
+
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when()
+ .get( clusterUrl + "/dynamic-app-path" );
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testNakedAppDeploy() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-naked-app-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ given()
+ //.log().all()
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( gatewayUrl + "/test-topology/dynamic-app/?null" ) )
+ .when().get( gatewayUrl + "/test-topology/dynamic-app" );
+
+ LOG_EXIT();
+ }
+
+ @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testDefaultAppName() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-default-app-name-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ String username = "guest";
+ String password = "guest-password";
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app" );
+
+ assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
+ topos.reloadTopologies();
+
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when()
+ .get( clusterUrl + "/dynamic-app" );
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ assertThat( deployDir.listFiles(), is(arrayWithSize(0)) );
+
+ LOG_EXIT();
+ }
+
+ @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testMultiApps() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-multi-apps-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ String username = "guest";
+ String password = "guest-password";
+
+ String body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "text/html" )
+ .when().get( clusterUrl + "/static-hello-app-path/index.html" ).asString();
+ assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
+
+ body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .contentType( "" )
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .when().get( clusterUrl + "/static-json-app/one.json" ).asString();
+ assertThat( body, sameJSONAs( "{'test-name-one':'test-value-one'}" ) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .contentType( "application/xml" )
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .when().get( clusterUrl + "/test.xml" ).asString();
+ assertThat( the(body), hasXPath( "/test" ) );
+
+ assertThat( FileUtils.deleteQuietly( topoFile ), is(true) );
+ topos.reloadTopologies();
+
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/static-hello-app-path/index.html" );
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/static-json-app/one.json" );
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/test.xml" );
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testServicesAndApplications() throws Exception {
+ LOG_ENTER();
+
+ String topoStr = TestUtils.merge( DAT, "test-svcs-and-apps-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ String username = "guest";
+ String password = "guest-password";
+
+ mockWebHdfs.expect()
+ .method( "GET" )
+ .pathInfo( "/v1/" )
+ .queryParam( "op", "GETHOMEDIRECTORY" )
+ .queryParam( "user.name", "guest" )
+ .respond()
+ .status( HttpStatus.SC_OK )
+ .content( "{\"path\":\"/users/guest\"}", Charset.forName("UTF-8") )
+ .contentType( "application/json" );
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .queryParam( "op", "GETHOMEDIRECTORY" )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "application/json" )
+ .body( "path", is( "/users/guest") )
+ .when().get( clusterUrl + "/webhdfs/v1" );
+ assertThat( mockWebHdfs.isEmpty(), is(true) );
+
+ String body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .contentType( "application/xml" )
+ .when().get( clusterUrl + "/static-xml-app/test.xml" ).asString();
+ assertThat( the(body), hasXPath( "test" ) );
+
+ body = given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .contentType( "" )
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .when().get( clusterUrl + "/app-two/one.json" ).asString();
+ assertThat( body, sameJSONAs( "{'test-name-one':'test-value-one'}" ) );
+
+ assertThat( FileUtils.deleteQuietly( topoFile ), is(true) );
+ topos.reloadTopologies();
+
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/app-one/index.html" );
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/app-two/one.json" );
+ given()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .when().get( clusterUrl + "/test.xml" );
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ assertThat( deployDir.listFiles(), is(arrayWithSize(0)) );
+
+ LOG_EXIT();
+ }
+
+ @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testDeploymentCleanup() throws Exception {
+ LOG_ENTER();
+
+ String username = "guest";
+ String password = "guest-password";
+
+ int oldVersionLimit = config.getGatewayDeploymentsBackupVersionLimit();
+
+ try {
+ gateway.stop();
+ config.setGatewayDeploymentsBackupVersionLimit( 1 );
+ startGatewayServer();
+
+ String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+ topos.reloadTopologies();
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ String[] topoDirs1 = deployDir.list();
+ assertThat( topoDirs1, is(arrayWithSize(1)) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ TestUtils.waitUntilNextSecond();
+ FileUtils.touch( topoFile );
+
+ topos.reloadTopologies();
+ String[] topoDirs2 = deployDir.list();
+ assertThat( topoDirs2, is(arrayWithSize(2)) );
+ assertThat( topoDirs2, hasItemInArray(topoDirs1[0]) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ TestUtils.waitUntilNextSecond();
+ FileUtils.touch( topoFile );
+ topos.reloadTopologies();
+
+ String[] topoDirs3 = deployDir.list();
+ assertThat( topoDirs3, is(arrayWithSize(2)) );
+ assertThat( topoDirs3, not(hasItemInArray(topoDirs1[0])) );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ } finally {
+ gateway.stop();
+ config.setGatewayDeploymentsBackupAgeLimit( oldVersionLimit );
+ startGatewayServer();
+ }
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
+ public void testDefaultTopology() throws Exception {
+ LOG_ENTER();
+
+ try {
+ gateway.stop();
+ config.setGatewayDeploymentsBackupVersionLimit( 1 );
+ startGatewayServer();
+
+ String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
+ File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ File deployDir = new File( config.getGatewayDeploymentDir() );
+ String[] topoDirs = deployDir.list();
+ assertThat( topoDirs, is(arrayWithSize(1)) );
+
+ String username = "guest";
+ String password = "guest-password";
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
+ topoFile = new File( config.getGatewayTopologyDir(), "test-topology-2.xml" );
+ FileUtils.writeStringToFile( topoFile, topoStr );
+
+ topos.reloadTopologies();
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( gatewayUrl + "/test-topology" + "/dynamic-app-path/?null" ) )
+ .when().get( gatewayUrl + "/test-topology/dynamic-app-path" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( gatewayUrl + "/test-topology-2" + "/dynamic-app-path/?null" ) )
+ .when().get( gatewayUrl + "/test-topology-2/dynamic-app-path" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_NOT_FOUND )
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) );
+
+ gateway.stop();
+ config.setDefaultTopologyName( "test-topology" );
+ startGatewayServer();
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( gatewayUrl + "/test-topology" + "/dynamic-app-path/?null" ) )
+ .when().get( gatewayUrl + "/test-topology/dynamic-app-path" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .statusCode( HttpStatus.SC_OK )
+ .body( is( gatewayUrl + "/test-topology-2" + "/dynamic-app-path/?null" ) )
+ .when().get( gatewayUrl + "/test-topology-2/dynamic-app-path" );
+
+ given()
+ //.log().all()
+ .auth().preemptive().basic( username, password )
+ .expect()
+ //.log().all()
+ .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
+ .when().get( clusterUrl + "/dynamic-app-path" );
+
+ } finally {
+ gateway.stop();
+ config.setDefaultTopologyName( null );
+ startGatewayServer();
+ }
+
+ LOG_EXIT();
+ }
+
+ public static Collection<String> toNames( File[] files ) {
+ List<String> names = new ArrayList<String>( files.length );
+ for( File file : files ) {
+ names.add( file.getAbsolutePath() );
+ }
+ return names;
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
index 990d776..534f3b0 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
@@ -218,7 +218,7 @@ public class GatewayDeployFuncTest {
public void testDeployRedeployUndeploy() throws InterruptedException, IOException {
LOG_ENTER();
long sleep = 200;
- int numFilesInWar = 5;
+ int numFilesInWebInf = 4; // # files in WEB-INF (ie gateway.xml, rewrite.xml, shiro.ini, web.xml)
String username = "guest";
String password = "guest-password";
String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
@@ -226,7 +226,7 @@ public class GatewayDeployFuncTest {
File topoDir = new File( config.getGatewayTopologyDir() );
File deployDir = new File( config.getGatewayDeploymentDir() );
- File warDir;
+ File earDir;
// Make sure deployment directory is empty.
assertThat( topoDir.listFiles().length, is( 0 ) );
@@ -235,10 +235,10 @@ public class GatewayDeployFuncTest {
File descriptor = writeTestTopology( "test-cluster", createTopology() );
long writeTime = System.currentTimeMillis();
- warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 1, 0, sleep );
- for( File webInfDir : warDir.listFiles() ) {
- waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
- }
+ earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 1, 0, sleep );
+ File warDir = new File( earDir, "%2F" );
+ File webInfDir = new File( warDir, "WEB-INF" );
+ waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
waitForAccess( serviceUrl, username, password, sleep );
// Wait to make sure a second has passed to ensure the the file timestamps are different.
@@ -252,10 +252,10 @@ public class GatewayDeployFuncTest {
assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
// Check to make sure there are two war directories with the same root.
- warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 2, 1, sleep );
- for( File webInfDir : warDir.listFiles() ) {
- waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
- }
+ earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 2, 1, sleep );
+ warDir = new File( earDir, "%2F" );
+ webInfDir = new File( warDir, "WEB-INF" );
+ waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
waitForAccess( serviceUrl, username, password, sleep );
// Wait to make sure a second has passed to ensure the the file timestamps are different.
@@ -269,10 +269,10 @@ public class GatewayDeployFuncTest {
assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
// Check to make sure there are two war directories with the same root.
- warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 3, 2, sleep );
- for( File webInfDir : warDir.listFiles() ) {
- waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
- }
+ earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 3, 2, sleep );
+ warDir = new File( earDir, "%2F" );
+ webInfDir = new File( warDir, "WEB-INF" );
+ waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
waitForAccess( serviceUrl, username, password, sleep );
// Delete the test topology.
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 37d1f6f..29e8a15 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -38,11 +38,18 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
private String frontendUrl = null;
private boolean xForwardedEnabled = true;
+ private String gatewayApplicationsDir = null;
+ private String gatewayServicesDir;
+ private String defaultTopologyName = "default";
public void setGatewayHomeDir( String gatewayHomeDir ) {
this.gatewayHomeDir = gatewayHomeDir;
}
+ public String getGatewayHomeDir() {
+ return this.gatewayHomeDir;
+ }
+
@Override
public String getGatewayConfDir() {
return gatewayHomeDir;
@@ -151,12 +158,13 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
return kerberosLoginConfig;
}
- /* (non-Javadoc)
- * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultTopologyName()
- */
@Override
public String getDefaultTopologyName() {
- return "default";
+ return defaultTopologyName;
+ }
+
+ public void setDefaultTopologyName( String defaultTopologyName ) {
+ this.defaultTopologyName = defaultTopologyName;
}
/* (non-Javadoc)
@@ -237,10 +245,31 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
// this.kerberosLoginConfig = kerberosLoginConfig;
// }
- @Override
- public String getGatewayServicesDir() {
- return gatewayHomeDir + "/data/services";
- }
+ @Override
+ public String getGatewayServicesDir() {
+ if( gatewayServicesDir != null ) {
+ return gatewayServicesDir;
+ } else {
+ return getGatewayDataDir() + "/services";
+ }
+ }
+
+ public void setGatewayServicesDir( String gatewayServicesDir ) {
+ this.gatewayServicesDir = gatewayServicesDir;
+ }
+
+ @Override
+ public String getGatewayApplicationsDir() {
+ if( gatewayApplicationsDir != null ) {
+ return gatewayApplicationsDir;
+ } else {
+ return getGatewayConfDir() + "/applications";
+ }
+ }
+
+ public void setGatewayApplicationsDir( String gatewayApplicationsDir ) {
+ this.gatewayApplicationsDir = gatewayApplicationsDir;
+ }
@Override
public boolean isXForwardedEnabled() {
@@ -289,4 +318,25 @@ public class GatewayTestConfig extends Configuration implements GatewayConfig {
return 8*1024;
}
+ private int backupVersionLimit = -1;
+
+ public void setGatewayDeploymentsBackupVersionLimit( int newBackupVersionLimit ) {
+ backupVersionLimit = newBackupVersionLimit;
+ }
+
+ public int getGatewayDeploymentsBackupVersionLimit() {
+ return backupVersionLimit;
+ }
+
+ private long backupAgeLimit = -1;
+
+ @Override
+ public long getGatewayDeploymentsBackupAgeLimit() {
+ return backupAgeLimit;
+ }
+
+ public void setGatewayDeploymentsBackupAgeLimit( long newBackupAgeLimit ) {
+ backupAgeLimit = newBackupAgeLimit;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/TestServlet.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/TestServlet.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/TestServlet.java
new file mode 100644
index 0000000..a0b0935
--- /dev/null
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/TestServlet.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway;
+
+import java.io.IOException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+public class TestServlet extends HttpServlet {
+
+ @Override
+ protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws IOException {
+ response.setContentType( "text/plain" );
+ response.getWriter().write( request.getRequestURL().toString() + "?" + request.getQueryString() );
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
index fc28dba..76d923f 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -17,50 +17,56 @@
*/
package org.apache.hadoop.gateway.deploy;
+import java.io.File;
+import java.io.IOException;
+import java.io.StringWriter;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import javax.xml.parsers.ParserConfigurationException;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerException;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.gateway.GatewayTestConfig;
import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.filter.XForwardedHeaderFilter;
+import org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter;
import org.apache.hadoop.gateway.services.DefaultGatewayServices;
import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.topology.Application;
import org.apache.hadoop.gateway.topology.Param;
import org.apache.hadoop.gateway.topology.Provider;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
+import org.apache.hadoop.test.TestUtils;
import org.apache.hadoop.test.log.NoOpAppender;
import org.apache.log4j.Appender;
+import org.jboss.shrinkwrap.api.Archive;
+import org.jboss.shrinkwrap.api.ArchivePath;
+import org.jboss.shrinkwrap.api.spec.EnterpriseArchive;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
-import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.StringWriter;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-
import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo;
@@ -71,10 +77,10 @@ import static org.junit.Assert.fail;
public class DeploymentFactoryFuncTest {
private static final long SHORT_TIMEOUT = 1000L;
- private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
+ private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
private static final long LONG_TIMEOUT = 10 * MEDIUM_TIMEOUT;
- @Test( timeout = SHORT_TIMEOUT )
+ @Test( timeout = MEDIUM_TIMEOUT )
public void testGenericProviderDeploymentContributor() throws ParserConfigurationException, SAXException, IOException, TransformerException {
LOG_ENTER();
GatewayConfig config = new GatewayTestConfig();
@@ -122,9 +128,9 @@ public class DeploymentFactoryFuncTest {
provider.addParam( param );
topology.addProvider( provider );
- WebArchive war = DeploymentFactory.createDeployment( config, topology );
+ EnterpriseArchive war = DeploymentFactory.createDeployment( config, topology );
- Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+ Document gateway = TestUtils.parseXml( war.get( "%2F/WEB-INF/gateway.xml" ).getAsset().openStream() );
//dump( gateway );
//by default the first filter will be the X-Forwarded header filter
@@ -200,7 +206,7 @@ public class DeploymentFactoryFuncTest {
}
@Test( timeout = MEDIUM_TIMEOUT )
- public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
+ public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException, URISyntaxException, TransformerException {
LOG_ENTER();
GatewayConfig config = new GatewayTestConfig();
//Testing without x-forwarded headers filter
@@ -250,19 +256,23 @@ public class DeploymentFactoryFuncTest {
authorizer.setEnabled( true );
topology.addProvider( authorizer );
- WebArchive war = DeploymentFactory.createDeployment( config, topology );
+ EnterpriseArchive war = DeploymentFactory.createDeployment( config, topology );
// File dir = new File( System.getProperty( "user.dir" ) );
// File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
- Document web = parse( war.get( "WEB-INF/web.xml" ).getAsset().openStream() );
- assertThat( web, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster" ) ) );
+ Document web = TestUtils.parseXml( war.get( "%2F/WEB-INF/web.xml" ).getAsset().openStream() );
+ //TestUtils.dumpXml( web );
+ assertThat( web, hasXPath( "/web-app" ) );
+ assertThat( web, hasXPath( "/web-app/servlet" ) );
+ assertThat( web, hasXPath( "/web-app/servlet/servlet-name" ) );
+ assertThat( web, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster-knox-gateway-servlet" ) ) );
assertThat( web, hasXPath( "/web-app/servlet/servlet-class", equalTo( "org.apache.hadoop.gateway.GatewayServlet" ) ) );
assertThat( web, hasXPath( "/web-app/servlet/init-param/param-name", equalTo( "gatewayDescriptorLocation" ) ) );
- assertThat( web, hasXPath( "/web-app/servlet/init-param/param-value", equalTo( "gateway.xml" ) ) );
- assertThat( web, hasXPath( "/web-app/servlet-mapping/servlet-name", equalTo( "test-cluster" ) ) );
+ assertThat( web, hasXPath( "/web-app/servlet/init-param/param-value", equalTo( "/WEB-INF/gateway.xml" ) ) );
+ assertThat( web, hasXPath( "/web-app/servlet-mapping/servlet-name", equalTo( "test-cluster-knox-gateway-servlet" ) ) );
assertThat( web, hasXPath( "/web-app/servlet-mapping/url-pattern", equalTo( "/*" ) ) );
- Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+ Document gateway = TestUtils.parseXml( war.get( "%2F/WEB-INF/gateway.xml" ).getAsset().openStream() );
assertThat( gateway, hasXPath( "/gateway/resource[1]/pattern", equalTo( "/webhdfs/v1/?**" ) ) );
//assertThat( gateway, hasXPath( "/gateway/resource[1]/target", equalTo( "http://localhost:50070/webhdfs/v1/?{**}" ) ) );
@@ -373,7 +383,7 @@ public class DeploymentFactoryFuncTest {
ha.setEnabled(true);
topology.addProvider(ha);
- for (int i = 0; i < 100; i++) {
+ for (int i = 0; i < 10; i++) {
createAndTestDeployment(config, topology);
}
LOG_EXIT();
@@ -381,15 +391,15 @@ public class DeploymentFactoryFuncTest {
private void createAndTestDeployment(GatewayConfig config, Topology topology) throws IOException, SAXException, ParserConfigurationException {
- WebArchive war = DeploymentFactory.createDeployment(config, topology);
+ EnterpriseArchive war = DeploymentFactory.createDeployment(config, topology);
// File dir = new File( System.getProperty( "user.dir" ) );
// File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
- Document web = parse(war.get("WEB-INF/web.xml").getAsset().openStream());
+ Document web = TestUtils.parseXml(war.get("%2F/WEB-INF/web.xml").getAsset().openStream());
assertThat(web, hasXPath("/web-app/servlet/servlet-class", equalTo("org.apache.hadoop.gateway.GatewayServlet")));
assertThat(web, hasXPath("/web-app/servlet/init-param/param-name", equalTo("gatewayDescriptorLocation")));
- assertThat(web, hasXPath("/web-app/servlet/init-param/param-value", equalTo("gateway.xml")));
- assertThat(web, hasXPath("/web-app/servlet-mapping/servlet-name", equalTo("test-cluster")));
+ assertThat(web, hasXPath("/web-app/servlet/init-param/param-value", equalTo("/WEB-INF/gateway.xml")));
+ assertThat(web, hasXPath("/web-app/servlet-mapping/servlet-name", equalTo("test-cluster-knox-gateway-servlet")));
assertThat(web, hasXPath("/web-app/servlet-mapping/url-pattern", equalTo("/*")));
//testing the order of listener classes generated
assertThat(web, hasXPath("/web-app/listener[2]/listener-class", equalTo("org.apache.hadoop.gateway.services.GatewayServicesContextListener")));
@@ -397,7 +407,7 @@ public class DeploymentFactoryFuncTest {
assertThat(web, hasXPath("/web-app/listener[4]/listener-class", equalTo("org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletContextListener")));
}
- @Test( timeout = MEDIUM_TIMEOUT )
+ @Test( timeout = LONG_TIMEOUT )
public void testDeploymentWithServiceParams() throws Exception {
LOG_ENTER();
GatewayConfig config = new GatewayTestConfig();
@@ -452,8 +462,8 @@ public class DeploymentFactoryFuncTest {
service.addParam( param );
topology.addService( service );
- WebArchive war = DeploymentFactory.createDeployment( config, topology );
- Document doc = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+ EnterpriseArchive war = DeploymentFactory.createDeployment( config, topology );
+ Document doc = TestUtils.parseXml( war.get( "%2F/WEB-INF/gateway.xml" ).getAsset().openStream() );
// dump( doc );
Node resourceNode, filterNode, paramNode;
@@ -488,11 +498,145 @@ public class DeploymentFactoryFuncTest {
LOG_EXIT();
}
- private Document parse( InputStream stream ) throws IOException, SAXException, ParserConfigurationException {
- DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
- DocumentBuilder builder = factory.newDocumentBuilder();
- InputSource source = new InputSource( stream );
- return builder.parse( source );
+ @Test( timeout = MEDIUM_TIMEOUT )
+ public void testDeploymentWithApplication() throws Exception {
+ LOG_ENTER();
+ GatewayConfig config = new GatewayTestConfig();
+ File targetDir = new File(System.getProperty("user.dir"), "target");
+ File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
+ gatewayDir.mkdirs();
+ ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
+ File deployDir = new File(config.getGatewayDeploymentDir());
+ deployDir.mkdirs();
+ addStacksDir(config, targetDir);
+ URL serviceUrl = TestUtils.getResourceUrl( DeploymentFactoryFuncTest.class, "test-apps/minimal-test-app/service.xml" );
+ File serviceFile = new File( serviceUrl.toURI() );
+ File appsDir = serviceFile.getParentFile().getParentFile();
+ ((GatewayTestConfig)config).setGatewayApplicationsDir(appsDir.getAbsolutePath());
+
+ DefaultGatewayServices srvcs = new DefaultGatewayServices();
+ Map<String, String> options = new HashMap<String, String>();
+ options.put("persist-master", "false");
+ options.put("master", "password");
+ try {
+ DeploymentFactory.setGatewayServices(srvcs);
+ srvcs.init(config, options);
+ } catch (ServiceLifecycleException e) {
+ e.printStackTrace(); // I18N not required.
+ }
+
+ Topology topology = new Topology();
+ topology.setName( "test-topology" );
+
+ Application app;
+
+ app = new Application();
+ app.setName( "minimal-test-app" );
+ app.addUrl( "/minimal-test-app-path" );
+ topology.addApplication( app );
+
+ EnterpriseArchive archive = DeploymentFactory.createDeployment( config, topology );
+ assertThat( archive, notNullValue() );
+
+ Document doc;
+
+ doc = TestUtils.parseXml( archive.get( "META-INF/topology.xml" ).getAsset().openStream() );
+ assertThat( doc, notNullValue() );
+
+ doc = TestUtils.parseXml( archive.get( "%2Fminimal-test-app-path/WEB-INF/gateway.xml" ).getAsset().openStream() );
+ assertThat( doc, notNullValue() );
+ //dump( doc );
+ assertThat( doc, hasXPath("/gateway/resource/pattern", equalTo("/**?**")));
+ assertThat( doc, hasXPath("/gateway/resource/filter[1]/role", equalTo("xforwardedheaders")));
+ assertThat( doc, hasXPath("/gateway/resource/filter[1]/name", equalTo("XForwardedHeaderFilter")));
+ assertThat( doc, hasXPath("/gateway/resource/filter[1]/class", equalTo(XForwardedHeaderFilter.class.getName())));
+ assertThat( doc, hasXPath("/gateway/resource/filter[2]/role", equalTo("rewrite")));
+ assertThat( doc, hasXPath("/gateway/resource/filter[2]/name", equalTo("url-rewrite")));
+ assertThat( doc, hasXPath("/gateway/resource/filter[2]/class", equalTo(UrlRewriteServletFilter.class.getName())));
+
+ LOG_EXIT();
+ }
+
+ @Test( timeout = MEDIUM_TIMEOUT )
+ public void testDeploymentWithServicesAndApplications() throws Exception {
+ LOG_ENTER();
+ GatewayConfig config = new GatewayTestConfig();
+ File targetDir = new File(System.getProperty("user.dir"), "target");
+ File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
+ gatewayDir.mkdirs();
+ ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
+ File deployDir = new File(config.getGatewayDeploymentDir());
+ deployDir.mkdirs();
+ addStacksDir(config, targetDir);
+ URL serviceUrl = TestUtils.getResourceUrl( DeploymentFactoryFuncTest.class, "test-apps/minimal-test-app/service.xml" );
+ File serviceFile = new File( serviceUrl.toURI() );
+ File appsDir = serviceFile.getParentFile().getParentFile();
+ ((GatewayTestConfig)config).setGatewayApplicationsDir(appsDir.getAbsolutePath());
+
+ DefaultGatewayServices srvcs = new DefaultGatewayServices();
+ Map<String, String> options = new HashMap<String, String>();
+ options.put("persist-master", "false");
+ options.put("master", "password");
+ try {
+ DeploymentFactory.setGatewayServices(srvcs);
+ srvcs.init(config, options);
+ } catch (ServiceLifecycleException e) {
+ e.printStackTrace(); // I18N not required.
+ }
+
+ Topology topology = new Topology();
+ topology.setName( "test-topology" );
+
+ Application app;
+
+ topology.setName( "test-cluster" );
+ Service service = new Service();
+ service.setRole( "WEBHDFS" );
+ service.addUrl( "http://localhost:50070/test-service-url" );
+ topology.addService( service );
+
+ app = new Application();
+ app.setName( "minimal-test-app" );
+ app.addUrl( "/minimal-test-app-path-one" );
+ topology.addApplication( app );
+
+ app.setName( "minimal-test-app" );
+ app.addUrl( "/minimal-test-app-path-two" );
+ topology.addApplication( app );
+
+ EnterpriseArchive archive = DeploymentFactory.createDeployment( config, topology );
+ assertThat( archive, notNullValue() );
+
+ Document doc;
+ org.jboss.shrinkwrap.api.Node node;
+
+ node = archive.get( "META-INF/topology.xml" );
+ assertThat( "Find META-INF/topology.xml", node, notNullValue() );
+ doc = TestUtils.parseXml( node.getAsset().openStream() );
+ assertThat( "Parse META-INF/topology.xml", doc, notNullValue() );
+
+ node = archive.get( "%2F" );
+ assertThat( "Find %2F", node, notNullValue() );
+ node = archive.get( "%2F/WEB-INF/gateway.xml" );
+ assertThat( "Find %2F/WEB-INF/gateway.xml", node, notNullValue() );
+ doc = TestUtils.parseXml( node.getAsset().openStream() );
+ assertThat( "Parse %2F/WEB-INF/gateway.xml", doc, notNullValue() );
+
+ WebArchive war = archive.getAsType( WebArchive.class, "%2Fminimal-test-app-path-one" );
+ assertThat( "Find %2Fminimal-test-app-path-one", war, notNullValue() );
+ node = war.get( "/WEB-INF/gateway.xml" );
+ assertThat( "Find %2Fminimal-test-app-path-one/WEB-INF/gateway.xml", node, notNullValue() );
+ doc = TestUtils.parseXml( node.getAsset().openStream() );
+ assertThat( "Parse %2Fminimal-test-app-path-one/WEB-INF/gateway.xml", doc, notNullValue() );
+
+ war = archive.getAsType( WebArchive.class, "%2Fminimal-test-app-path-two" );
+ assertThat( "Find %2Fminimal-test-app-path-two", war, notNullValue() );
+ node = war.get( "/WEB-INF/gateway.xml" );
+ assertThat( "Find %2Fminimal-test-app-path-two/WEB-INF/gateway.xml", node, notNullValue() );
+ doc = TestUtils.parseXml( node.getAsset().openStream() );
+ assertThat( "Parse %2Fminimal-test-app-path-two/WEB-INF/gateway.xml", doc, notNullValue() );
+
+ LOG_EXIT();
}
private void addStacksDir(GatewayConfig config, File targetDir) {
@@ -532,4 +676,21 @@ public class DeploymentFactoryFuncTest {
return XPathFactory.newInstance().newXPath().compile( expression ).evaluate( scope );
}
+ private static void dump( org.jboss.shrinkwrap.api.Node node, String prefix ) {
+ System.out.println( prefix + ": " + node.getPath() );
+ Set<org.jboss.shrinkwrap.api.Node> children = node.getChildren();
+ if( children != null && !children.isEmpty() ) {
+ for( org.jboss.shrinkwrap.api.Node child : children ) {
+ dump( child, prefix + " " );
+ }
+ }
+ }
+
+ private static void dump( Archive archive ) {
+ Map<ArchivePath,org.jboss.shrinkwrap.api.Node> content = archive.getContent();
+ for( Map.Entry<ArchivePath,org.jboss.shrinkwrap.api.Node> entry : content.entrySet() ) {
+ dump( entry.getValue(), " " );
+ }
+ }
+
}
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/dynamic-app/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/dynamic-app/WEB-INF/web.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/dynamic-app/WEB-INF/web.xml
new file mode 100644
index 0000000..730d1e6
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/dynamic-app/WEB-INF/web.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<web-app
+ xmlns="http://java.sun.com/xml/ns/javaee"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_3_0.xsd"
+ version="3.0">
+ <servlet>
+ <servlet-name>dynamic-app-servlet</servlet-name>
+ <servlet-class>org.apache.hadoop.gateway.TestServlet</servlet-class>
+ </servlet>
+ <servlet-mapping>
+ <servlet-name>dynamic-app-servlet</servlet-name>
+ <url-pattern>/*</url-pattern>
+ </servlet-mapping>
+</web-app>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/readme.txt
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/readme.txt b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/readme.txt
new file mode 100644
index 0000000..cd2eef8
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/readme.txt
@@ -0,0 +1,18 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+This file is here to help the tests find the parent directory.
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-hello-app/index.html
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-hello-app/index.html b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-hello-app/index.html
new file mode 100644
index 0000000..27be025
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-hello-app/index.html
@@ -0,0 +1,24 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+ <head>
+ <title>Static Hello Application</title>
+ </head>
+ <body>
+ <p>Static Hello Application</p>
+ </body>
+</html>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/one.json
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/one.json b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/one.json
new file mode 100644
index 0000000..185a2a8
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/one.json
@@ -0,0 +1,3 @@
+{
+ "test-name-one":"test-value-one"
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/rewrite.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/rewrite.xml
new file mode 100644
index 0000000..656e229
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/rewrite.xml
@@ -0,0 +1,17 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules/>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/service.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/service.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/service.xml
new file mode 100644
index 0000000..4fda1cc
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-json-app/service.xml
@@ -0,0 +1,21 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="STATIC-JSON-APP" name="static-json-app" version="1.0.0">
+ <routes>
+ <route path="/**?**"/>
+ </routes>
+</service>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-xml-app/test.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-xml-app/test.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-xml-app/test.xml
new file mode 100644
index 0000000..f637baf
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-apps/static-xml-app/test.xml
@@ -0,0 +1,17 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<test/>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-default-app-name-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-default-app-name-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-default-app-name-topology.xml
new file mode 100644
index 0000000..019c88d
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-default-app-name-topology.xml
@@ -0,0 +1,53 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>$LDAP_URL</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <application>
+ <name>dynamic-app</name>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-dynamic-app-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-dynamic-app-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-dynamic-app-topology.xml
new file mode 100644
index 0000000..f25d343
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-dynamic-app-topology.xml
@@ -0,0 +1,54 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>$LDAP_URL</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <application>
+ <name>dynamic-app</name>
+ <url>dynamic-app-path</url>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-multi-apps-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-multi-apps-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-multi-apps-topology.xml
new file mode 100644
index 0000000..dab8738
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-multi-apps-topology.xml
@@ -0,0 +1,65 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>$LDAP_URL</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <application>
+ <name>static-hello-app</name>
+ <url>static-hello-app-path</url>
+ </application>
+ <application>
+ <name>dynamic-app</name>
+ <url>/dynamic-app-path</url>
+ </application>
+ <application>
+ <name>static-json-app</name>
+ </application>
+ <application>
+ <name>static-xml-app</name>
+ <url>/</url>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-naked-app-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-naked-app-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-naked-app-topology.xml
new file mode 100644
index 0000000..9e98d5b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-naked-app-topology.xml
@@ -0,0 +1,33 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>Anonymous</name>
+ <enabled>true</enabled>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <application>
+ <name>dynamic-app</name>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-static-hello-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-static-hello-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-static-hello-topology.xml
new file mode 100644
index 0000000..b212825
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-static-hello-topology.xml
@@ -0,0 +1,54 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>$LDAP_URL</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <application>
+ <name>static-hello-app</name>
+ <url>static-hello-app-path</url>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs-and-apps-topology.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs-and-apps-topology.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs-and-apps-topology.xml
new file mode 100644
index 0000000..b18595b
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs-and-apps-topology.xml
@@ -0,0 +1,62 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<topology>
+ <gateway>
+ <provider>
+ <role>authentication</role>
+ <name>ShiroProvider</name>
+ <enabled>true</enabled>
+ <param>
+ <name>main.ldapRealm</name>
+ <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.userDnTemplate</name>
+ <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.url</name>
+ <value>$LDAP_URL</value>
+ </param>
+ <param>
+ <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
+ <value>simple</value>
+ </param>
+ <param>
+ <name>urls./**</name>
+ <value>authcBasic</value>
+ </param>
+ </provider>
+ <provider>
+ <role>identity-assertion</role>
+ <name>Default</name>
+ <enabled>true</enabled>
+ </provider>
+ </gateway>
+ <service>
+ <role>WEBHDFS</role>
+ <url>$WEBHDFS_URL</url>
+ </service>
+ <application>
+ <name>static-xml-app</name>
+ <url>static-xml-app</url>
+ </application>
+ <application>
+ <name>static-json-app</name>
+ <url>/app-two</url>
+ </application>
+</topology>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/readme.txt
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/readme.txt b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/readme.txt
new file mode 100644
index 0000000..cd2eef8
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/readme.txt
@@ -0,0 +1,18 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+This file is here to help the tests find the parent directory.
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/rewrite.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/rewrite.xml
new file mode 100644
index 0000000..efbd93d
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/rewrite.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<rules>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound" pattern="hdfs://*:*/{path=**}?{**}">
+ <rewrite template="{$frontend[url]}/webhdfs/v1/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound" pattern="webhdfs://*:*/{path=**}?{**}">
+ <rewrite template="{$frontend[url]}/webhdfs/v1/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="OUT" name="WEBHDFS/webhdfs/outbound/namenode/headers/location">
+ <match pattern="{scheme}://{host}:{port}/{path=**}?{**}"/>
+ <rewrite template="{$frontend[url]}/webhdfs/data/v1/{path=**}?{scheme}?host={$hostmap(host)}?{port}?{**}"/>
+ <encrypt-query/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/hdfs" pattern="hdfs:/{path=**}?{**}">
+ <rewrite template="{$serviceMappedUrl[NAMENODE]}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/webhdfs" pattern="webhdfs:/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/root" pattern="*://*:*/**/webhdfs/{version}/?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/file" pattern="*://*:*/**/webhdfs/{version}/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home" pattern="*://*:*/**/webhdfs/{version}/~?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/namenode/home/file" pattern="*://*:*/**/webhdfs/{version}/~/{path=**}?{**}">
+ <rewrite template="{$serviceUrl[WEBHDFS]}/{version}/user/{$username}/{path=**}?{**}"/>
+ </rule>
+
+ <rule dir="IN" name="WEBHDFS/webhdfs/inbound/datanode">
+ <decrypt-query/>
+ <match pattern="*://*:*/**/webhdfs/data/*/{path=**}?{scheme}?{host}?{port}?{**}"/>
+ <rewrite template="{scheme}://{host}:{port}/{path=**}?{**}"/>
+ </rule>
+
+ <filter name="WEBHDFS/webhdfs/outbound/namenode/headers">
+ <content type="application/x-http-headers">
+ <apply path="Location" rule="WEBHDFS/webhdfs/outbound/namenode/headers/location"/>
+ </content>
+ </filter>
+
+</rules>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/service.xml
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/service.xml b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/service.xml
new file mode 100644
index 0000000..9d39a32
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/test-svcs/webhdfs/2.4.0/service.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<service role="WEBHDFS" name="webhdfs" version="2.4.0">
+ <routes>
+ <route path="/webhdfs/v1/?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/root" to="request.url"/>
+ </route>
+ <route path="/webhdfs/v1/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/file" to="request.url"/>
+ <rewrite apply="WEBHDFS/webhdfs/outbound/namenode/headers" to="response.headers"/>
+ </route>
+ <route path="/webhdfs/v1/~?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/home" to="request.url"/>
+ </route>
+ <route path="/webhdfs/v1/~/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/namenode/home/file" to="request.url"/>
+ <rewrite apply="WEBHDFS/webhdfs/outbound/namenode/headers" to="response.headers"/>
+ </route>
+ <route path="/webhdfs/data/v1/**?**">
+ <rewrite apply="WEBHDFS/webhdfs/inbound/datanode" to="request.url"/>
+ <dispatch contributor-name="http-client" />
+ </route>
+ </routes>
+ <dispatch classname="org.apache.hadoop.gateway.hdfs.dispatch.HdfsHttpClientDispatch" ha-classname="org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaDispatch"/>
+ <testURLs>
+ <testURL>/webhdfs/v1/?op=LISTSTATUS</testURL>
+ </testURLs>
+</service>
http://git-wip-us.apache.org/repos/asf/knox/blob/a70a3b56/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/users.ldif b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/users.ldif
new file mode 100644
index 0000000..b982cb3
--- /dev/null
+++ b/gateway-test/src/test/resources/org/apache/hadoop/gateway/GatewayAppFuncTest/users.ldif
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+# entry for a sample people container
+# please replace with site specific values
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+# entry for a sample end user
+# please replace with site specific values
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: User
+uid: guest
+userPassword:guest-password
\ No newline at end of file