Posted to commits@knox.apache.org by su...@apache.org on 2016/01/20 19:58:48 UTC

knox git commit: KNOX-651 Initial changes to add a 'release' test project

Repository: knox
Updated Branches:
  refs/heads/master 9619a398f -> e6a7cb402


KNOX-651 Initial changes to add a 'release' test project


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/e6a7cb40
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/e6a7cb40
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/e6a7cb40

Branch: refs/heads/master
Commit: e6a7cb402aafc28cb1d257aecbcc4c4b7d67ba51
Parents: 9619a39
Author: Sumit Gupta <su...@apache.org>
Authored: Wed Jan 20 13:57:08 2016 -0500
Committer: Sumit Gupta <su...@apache.org>
Committed: Wed Jan 20 13:57:08 2016 -0500

----------------------------------------------------------------------
 gateway-test-release/pom.xml                    | 192 ++++++++++++
 .../hadoop/gateway/GatewayTestConfig.java       | 297 +++++++++++++++++++
 .../hadoop/gateway/GatewayTestDriver.java       | 212 +++++++++++++
 .../hadoop/gateway/SecureClusterTest.java       | 291 ++++++++++++++++++
 .../hadoop/gateway/SecureClusterTest/users.ldif |  61 ++++
 .../hadoop/test/category/FunctionalTests.java   |  21 --
 .../hadoop/test/category/IntegrationTests.java  |  21 --
 .../hadoop/test/category/ReleaseTest.java       |  21 ++
 .../apache/hadoop/test/category/VerifyTest.java |  21 ++
 .../hadoop/gateway/GatewayBasicFuncTest.java    |   4 +-
 .../hadoop/gateway/GatewayFuncTestDriver.java   |  81 +++--
 .../gateway/GatewayLdapPosixGroupFuncTest.java  |   4 +-
 .../hadoop/gateway/WebHdfsHaFuncTest.java       |   6 +-
 pom.xml                                         |  34 ++-
 14 files changed, 1174 insertions(+), 92 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-release/pom.xml b/gateway-test-release/pom.xml
new file mode 100644
index 0000000..b0312e1
--- /dev/null
+++ b/gateway-test-release/pom.xml
@@ -0,0 +1,192 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>gateway</artifactId>
+        <groupId>org.apache.knox</groupId>
+        <version>0.8.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>gateway-test-integration</artifactId>
+    <name>gateway-test-integration</name>
+    <description>Gateway Integration tests parent pom</description>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-client</artifactId>
+            <version>2.6.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-minicluster</artifactId>
+            <version>2.6.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <version>2.6.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <type>test-jar</type>
+            <version>2.6.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <type>test-jar</type>
+            <version>2.6.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common-test</artifactId>
+            <version>0.22.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <version>2.6.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-minikdc</artifactId>
+            <version>2.6.0</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.directory.jdbm</groupId>
+            <artifactId>apacheds-jdbm1</artifactId>
+            <version>2.0.0-M3</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+            <version>1.2</version>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+            <version>19.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.3.6</version>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>1.2.17</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.6.6</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.4</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-release</artifactId>
+            <version>0.8.0-SNAPSHOT</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.directory.server</groupId>
+                    <artifactId>apacheds-all</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-library</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.directory.server</groupId>
+            <artifactId>apacheds-core-integ</artifactId>
+            <version>2.0.0-M15</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.directory.api</groupId>
+                    <artifactId>api-ldap-schema-data</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+            <version>2.5</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>commons-beanutils</groupId>
+            <artifactId>commons-beanutils</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>com.mycila.xmltool</groupId>
+            <artifactId>xmltool</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-test-utils</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.knox</groupId>
+            <artifactId>gateway-test</artifactId>
+            <version>0.8.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-bundle-plugin</artifactId>
+                <inherited>true</inherited>
+                <extensions>true</extensions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
new file mode 100644
index 0000000..779eb2d
--- /dev/null
+++ b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class GatewayTestConfig extends Configuration implements GatewayConfig {
+
+  private String gatewayHomeDir = "gateway-home";
+  private String hadoopConfDir = "hadoop";
+  private String gatewayHost = "localhost";
+  private int gatewayPort = 0;
+  private String gatewayPath = "gateway";
+  private boolean hadoopKerberosSecured = false;
+  private String kerberosConfig = "/etc/knox/conf/krb5.conf";
+  private boolean kerberosDebugEnabled = false;
+  private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
+  private String frontendUrl = null;
+  private boolean xForwardedEnabled = true;
+
+  public void setGatewayHomeDir( String gatewayHomeDir ) {
+    this.gatewayHomeDir = gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewayConfDir() {
+    return gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewayDataDir() {
+    return gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewaySecurityDir() {
+    return gatewayHomeDir + "/security";
+  }
+
+  @Override
+  public String getGatewayTopologyDir() {
+    return gatewayHomeDir + "/topologies";
+  }
+
+  @Override
+  public String getGatewayDeploymentDir() {
+    return gatewayHomeDir + "/deployments";
+  }
+
+//  public void setDeploymentDir( String clusterConfDir ) {
+//    this.deployDir = clusterConfDir;
+//  }
+
+  @Override
+  public String getHadoopConfDir() {
+    return hadoopConfDir;
+  }
+
+//  public void setHadoopConfDir( String hadoopConfDir ) {
+//    this.hadoopConfDir = hadoopConfDir;
+//  }
+
+  @Override
+  public String getGatewayHost() {
+    return gatewayHost;
+  }
+
+//  public void setGatewayHost( String gatewayHost ) {
+//    this.gatewayHost = gatewayHost;
+//  }
+
+  @Override
+  public int getGatewayPort() {
+    return gatewayPort;
+  }
+
+//  public void setGatewayPort( int gatewayPort ) {
+//    this.gatewayPort = gatewayPort;
+//  }
+
+  @Override
+  public String getGatewayPath() {
+    return gatewayPath;
+  }
+
+  public void setGatewayPath( String gatewayPath ) {
+    this.gatewayPath = gatewayPath;
+  }
+
+  @Override
+  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
+    return new InetSocketAddress( getGatewayHost(), getGatewayPort() );
+  }
+
+  @Override
+  public boolean isSSLEnabled() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  @Override
+  public boolean isHadoopKerberosSecured() {
+    return hadoopKerberosSecured;
+  }
+
+  public void setHadoopKerberosSecured(boolean hadoopKerberosSecured) {
+    this.hadoopKerberosSecured = hadoopKerberosSecured;
+  }
+  
+  @Override
+  public String getKerberosConfig() {
+    return kerberosConfig;
+  }
+  
+  public void setKerberosConfig(String kerberosConfig) {
+    this.kerberosConfig = kerberosConfig;
+  }
+
+
+  @Override
+  public boolean isKerberosDebugEnabled() {
+    return kerberosDebugEnabled;
+  }
+  
+//  public void setKerberosDebugEnabled(boolean kerberosConfigEnabled) {
+//    this.kerberosDebugEnabled = kerberosDebugEnabled;
+//  }
+  
+  @Override
+  public String getKerberosLoginConfig() {
+    return kerberosLoginConfig;
+  }
+
+  public void setKerberosLoginConfig(String kerberosLoginConfig) {
+    this.kerberosLoginConfig = kerberosLoginConfig;
+  }
+
+  /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultTopologyName()
+     */
+  @Override
+  public String getDefaultTopologyName() {
+    return "default";
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultAppRedirectPath()
+   */
+  @Override
+  public String getDefaultAppRedirectPath() {
+    // TODO Auto-generated method stub
+    return "/gateway/sandbox";
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getFrontendUrl()
+   */
+  @Override
+  public String getFrontendUrl() { return frontendUrl; }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getExcludedSSLProtocols()
+   */
+  @Override
+  public List getExcludedSSLProtocols() {
+    List<String> protocols = new ArrayList<String>();
+    protocols.add("SSLv3");
+    return protocols;
+  }
+
+  public void setFrontendUrl( String frontendUrl ) {
+    this.frontendUrl = frontendUrl;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#isClientAuthNeeded()
+   */
+  @Override
+  public boolean isClientAuthNeeded() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststorePath()
+   */
+  @Override
+  public String getTruststorePath() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTrustAllCerts()
+   */
+  @Override
+  public boolean getTrustAllCerts() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststoreType()
+   */
+  @Override
+  public String getTruststoreType() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+  
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getKeystoreType()
+   */
+  @Override
+  public String getKeystoreType() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+//  public void setKerberosLoginConfig(String kerberosLoginConfig) {
+//   this.kerberosLoginConfig = kerberosLoginConfig;
+//  }
+
+   @Override
+   public String getGatewayServicesDir() {
+      return gatewayHomeDir + "/data/services";
+   }
+
+  @Override
+  public boolean isXForwardedEnabled() {
+    return xForwardedEnabled;
+  }
+
+  public void setXForwardedEnabled(boolean enabled) {
+    xForwardedEnabled = enabled;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getEphemeralDHKeySize()
+   */
+  @Override
+  public String getEphemeralDHKeySize() {
+    return "2048";
+  }
+
+  @Override
+  public int getHttpClientMaxConnections() {
+    return 16;
+  }
+
+  @Override
+  public int getThreadPoolMax() {
+    return 16;
+  }
+
+  @Override
+  public int getHttpServerRequestBuffer() {
+    return 16*1024;
+  }
+
+  @Override
+  public int getHttpServerRequestHeaderBuffer() {
+    return 8*1024;
+  }
+
+  @Override
+  public int getHttpServerResponseBuffer() {
+    return 32*1024;
+  }
+
+  @Override
+  public int getHttpServerResponseHeaderBuffer() {
+    return 8*1024;
+  }
+
+}
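
The config above is a plain setter-driven stub of GatewayConfig. As a minimal sketch (the helper class and its values are hypothetical, not part of this commit), a secure-cluster test might populate it roughly the way SecureClusterTest later in this commit does:

    import org.apache.hadoop.gateway.GatewayTestConfig;

    // Hypothetical helper, shown only to illustrate the setters defined above.
    public class ConfigSketch {
      public static GatewayTestConfig secureConfig( String krb5Conf, String jaasConf ) {
        GatewayTestConfig config = new GatewayTestConfig();
        config.setGatewayPath( "gateway" );         // same context path SecureClusterTest uses
        config.setHadoopKerberosSecured( true );    // switch the gateway into Kerberos mode
        config.setKerberosConfig( krb5Conf );       // e.g. the MiniKdc-generated krb5.conf
        config.setKerberosLoginConfig( jaasConf );  // JAAS config holding the client principal
        return config;
      }
    }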

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
new file mode 100644
index 0000000..6401584
--- /dev/null
+++ b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
@@ -0,0 +1,212 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLTag;
+import org.apache.commons.io.FileUtils;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.hamcrest.MatcherAssert;
+import org.hamcrest.Matchers;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.junit.Assert.assertThat;
+
+public class GatewayTestDriver {
+
+  private static Logger log = LoggerFactory.getLogger(GatewayTestDriver.class);
+
+  public Class<?> resourceBaseClass;
+  public SimpleLdapDirectoryServer ldap;
+  public TcpTransport ldapTransport;
+  public boolean useGateway;
+  public GatewayServer gateway;
+  public GatewayConfig config;
+  public String clusterName;
+
+  /**
+   * Sets the class from which relative test resource names should be resolved.
+   * @param resourceBaseClass The class from which relative test resource names should be resolved.
+   */
+  public void setResourceBase( Class<?> resourceBaseClass ) {
+    this.resourceBaseClass = resourceBaseClass;
+  }
+
+  /**
+   * Starts an embedded LDAP server on the specified port.
+   * @param port The desired port the LDAP server should listen on.
+   * @return The actual port the LDAP server is listening on.
+   * @throws Exception Thrown if a failure occurs.
+   */
+  public int setupLdap( int port ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    log.info( "LDAP port = " + port );
+    return port;
+  }
+
+
+  /**
+   * Creates a GATEWAY_HOME, starts a gateway instance and deploys a test topology.
+   */
+  public void setupGateway( GatewayTestConfig config, String cluster, XMLTag topology, boolean use ) throws Exception {
+    this.useGateway = use;
+    this.config = config;
+    this.clusterName = cluster;
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( config.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, cluster + ".xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    topology.toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+    File stacksDir = new File( config.getGatewayServicesDir() );
+    stacksDir.mkdirs();
+    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
+    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
+    File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
+    if (!stacksSourceDir.exists()) {
+      stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
+    }
+    if (stacksSourceDir.exists()) {
+      FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
+    }
+
+    gateway = GatewayServer.startGateway(config, srvcs);
+    MatcherAssert.assertThat("Failed to start gateway.", gateway, notNullValue());
+
+    log.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+  }
+
+  public void cleanup() throws Exception {
+    gateway.stop();
+    FileUtils.deleteQuietly( new File( config.getGatewayTopologyDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayServicesDir() ) );
+
+
+    ldap.stop( true );
+  }
+
+
+  public String getResourceBaseName() {
+    return resourceBaseClass.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  public String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public InputStream getResourceStream( String resource ) throws IOException {
+    InputStream stream = null;
+    if( resource.startsWith( "file:/" ) ) {
+      try {
+        stream = FileUtils.openInputStream( new File( new URI( resource ) ) );
+      } catch( URISyntaxException e ) {
+        throw new IOException( e  );
+      }
+    } else {
+      stream = ClassLoader.getSystemResourceAsStream( getResourceName( resource ) );
+    }
+    assertThat( "Failed to find test resource " + resource, stream, Matchers.notNullValue() );
+    return stream;
+  }
+
+  public static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public String getLdapUrl() {
+    return "ldap://localhost:" + ldapTransport.getPort();
+  }
+
+  public String getClusterUrl() {
+    String url;
+    String localHostName = getLocalHostName();
+    url = "http://" + localHostName + ":" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath() + "/" + clusterName;
+    return url;
+  }
+
+  public int getGatewayPort() {
+    return gateway.getAddresses()[0].getPort();
+  }
+
+  private String getLocalHostName() {
+    String hostName = "localhost";
+    try {
+      hostName = InetAddress.getByName("127.0.0.1").getHostName();
+    } catch( UnknownHostException e ) {
+      // Ignore and use the default.
+    }
+    return hostName;
+  }
+
+}
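
The driver wraps the embedded LDAP server and the gateway lifecycle. A minimal usage sketch (the class name and the trivial topology are hypothetical; a real test provides a users.ldif next to the resource-base class and a full topology, as SecureClusterTest below does):

    import com.mycila.xmltool.XMLDoc;
    import com.mycila.xmltool.XMLTag;
    import org.apache.hadoop.gateway.GatewayTestConfig;
    import org.apache.hadoop.gateway.GatewayTestDriver;

    // Hypothetical example, not part of this commit.
    public class DriverSketch {
      public static void main( String[] args ) throws Exception {
        GatewayTestDriver driver = new GatewayTestDriver();
        driver.setResourceBase( DriverSketch.class );          // users.ldif resolved relative to this class
        driver.setupLdap( GatewayTestDriver.findFreePort() );  // embedded LDAP on a free port
        XMLTag topology = XMLDoc.newDocument( true ).addRoot( "topology" ); // stand-in; real tests add providers/services
        driver.setupGateway( new GatewayTestConfig(), "cluster", topology, true );
        System.out.println( "Cluster URL: " + driver.getClusterUrl() );
        driver.cleanup();                                      // stops the gateway and LDAP, removes the temp dirs
      }
    }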

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-release/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
new file mode 100644
index 0000000..2638b1c
--- /dev/null
+++ b/gateway-test-release/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
@@ -0,0 +1,291 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.category.ReleaseTest;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpRequest;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.BasicUserPrincipal;
+import org.apache.http.auth.Credentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.message.BasicHttpRequest;
+import org.apache.http.util.EntityUtils;
+import org.apache.log4j.PropertyConfigurator;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.security.Principal;
+import java.util.Properties;
+
+import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+
+@Category(ReleaseTest.class)
+public class SecureClusterTest {
+
+  private static MiniDFSCluster miniDFSCluster;
+  private static MiniKdc kdc;
+  private static HdfsConfiguration configuration;
+  private static int nameNodeHttpPort;
+  private static String userName;
+
+  private static GatewayTestDriver driver = new GatewayTestDriver();
+  private static File baseDir;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    nameNodeHttpPort = findFreePort();
+    configuration = new HdfsConfiguration();
+    baseDir = new File(KeyStoreTestUtil.getClasspathDir(SecureClusterTest.class));
+    System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, baseDir.getAbsolutePath());
+    initKdc();
+    miniDFSCluster = new MiniDFSCluster.Builder(configuration)
+        .nameNodePort(20112)
+        .nameNodeHttpPort(nameNodeHttpPort)
+        .numDataNodes(0)
+        .format(true)
+        .racks(null)
+        .build();
+  }
+
+  private static void initKdc() throws Exception {
+    Properties kdcConf = MiniKdc.createConf();
+    kdc = new MiniKdc(kdcConf, baseDir);
+    kdc.start();
+
+    configuration = new HdfsConfiguration();
+    SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, configuration);
+    UserGroupInformation.setConfiguration(configuration);
+    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
+    userName = UserGroupInformation.createUserForTesting("guest", new String[] {"users"}).getUserName();
+    File keytabFile = new File(baseDir, userName + ".keytab");
+    String keytab = keytabFile.getAbsolutePath();
+    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
+    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
+    kdc.createPrincipal(keytabFile, userName + "/" + krbInstance, "HTTP/" + krbInstance);
+    String hdfsPrincipal = userName + "/" + krbInstance + "@" + kdc.getRealm();
+    String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
+
+    configuration.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
+    configuration.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
+    configuration.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
+    configuration.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
+    configuration.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
+    configuration.set(DFS_JOURNALNODE_KEYTAB_FILE_KEY, keytab);
+    configuration.set(DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
+    configuration.set(DFS_JOURNALNODE_KERBEROS_INTERNAL_SPNEGO_PRINCIPAL_KEY, spnegoPrincipal);
+    configuration.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    configuration.set(DFS_DATA_ENCRYPTION_ALGORITHM_KEY, "authentication");
+    configuration.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTP_AND_HTTPS.name());
+    configuration.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    configuration.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    configuration.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    configuration.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_KEY, 10);
+    configuration.set("hadoop.proxyuser." + userName + ".hosts", "*");
+    configuration.set("hadoop.proxyuser." + userName + ".groups", "*");
+    configuration.setBoolean("dfs.permissions", true);
+
+    String keystoresDir = baseDir.getAbsolutePath();
+    File sslClientConfFile = new File(keystoresDir + "/ssl-client.xml");
+    File sslServerConfFile = new File(keystoresDir + "/ssl-server.xml");
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, keystoresDir, configuration, false);
+    configuration.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
+        sslClientConfFile.getName());
+    configuration.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+        sslServerConfFile.getName());
+
+    setupKnox(keytab, hdfsPrincipal);
+  }
+
+  private static void setupKnox(String keytab, String hdfsPrincipal) throws Exception {
+    //kerberos setup for http client
+    File jaasConf = setupJaasConf(baseDir, keytab, hdfsPrincipal);
+    System.setProperty("java.security.krb5.conf", kdc.getKrb5conf().getAbsolutePath());
+    System.setProperty("java.security.auth.login.config", jaasConf.getAbsolutePath());
+    System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
+    System.setProperty("sun.security.krb5.debug", "true");
+
+    //knox setup
+    System.setProperty("gateway.hadoop.kerberos.secured", "true");
+    GatewayTestConfig config = new GatewayTestConfig();
+    config.setGatewayPath( "gateway" );
+    config.setHadoopKerberosSecured(true);
+    config.setKerberosConfig(kdc.getKrb5conf().getAbsolutePath());
+    config.setKerberosLoginConfig(jaasConf.getAbsolutePath());
+    driver.setResourceBase(SecureClusterTest.class);
+    driver.setupLdap(findFreePort());
+    driver.setupGateway(config, "cluster", createTopology(), true);
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    kdc.stop();
+    miniDFSCluster.shutdown();
+    driver.cleanup();
+  }
+
+  @Test
+  public void basicGetUserHomeRequest() throws Exception {
+    setupLogging();
+    CloseableHttpClient client = getHttpClient();
+    String method = "GET";
+    String uri = driver.getClusterUrl() + "/webhdfs/v1?op=GETHOMEDIRECTORY";
+    HttpHost target = new HttpHost("localhost", driver.getGatewayPort(), "http");
+    HttpRequest request = new BasicHttpRequest(method, uri);
+    CloseableHttpResponse response = client.execute(target, request);
+    String json = EntityUtils.toString(response.getEntity());
+    response.close();
+    System.out.println(json);
+    assertEquals("{\"Path\":\"/user/" + userName + "\"}", json);
+  }
+
+  private CloseableHttpClient getHttpClient() {
+    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+    credentialsProvider.setCredentials(AuthScope.ANY, new Credentials() {
+      @Override
+      public Principal getUserPrincipal() {
+        return new BasicUserPrincipal("guest");
+      }
+
+      @Override
+      public String getPassword() {
+        return "guest-password";
+      }
+    });
+
+    return HttpClients.custom()
+        .setDefaultCredentialsProvider(credentialsProvider)
+        .build();
+  }
+
+  private static void setupLogging() {
+    PropertyConfigurator.configure(ClassLoader.getSystemResource("log4j.properties"));
+  }
+
+  private static File setupJaasConf(File baseDir, String keyTabFile, String principal) throws IOException {
+    File file = new File(baseDir, "jaas.conf");
+    if (!file.exists()) {
+      file.createNewFile();
+    } else {
+      file.delete();
+      file.createNewFile();
+    }
+    FileWriter writer = new FileWriter(file);
+    String content = String.format("com.sun.security.jgss.initiate {\n" +
+        "com.sun.security.auth.module.Krb5LoginModule required\n" +
+        "renewTGT=true\n" +
+        "doNotPrompt=true\n" +
+        "useKeyTab=true\n" +
+        "keyTab=\"%s\"\n" +
+        "principal=\"%s\"\n" +
+        "isInitiator=true\n" +
+        "storeKey=true\n" +
+        "useTicketCache=true\n" +
+        "client=true;\n" +
+        "};\n", keyTabFile, principal);
+    writer.write(content);
+    writer.close();
+    return file;
+  }
+
+  /**
+   * Creates a topology that is deployed to the gateway instance for the test suite.
+   * Note that this topology is shared by all of the test methods in this suite.
+   * @return A populated XML structure for a topology file.
+   */
+  private static XMLTag createTopology() {
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag("role").addText("webappsec")
+        .addTag("name").addText("WebAppSec")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("csrf.enabled")
+        .addTag("value").addText("true").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag( "value" ).addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag( "value" ).addText(driver.getLdapUrl()).gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag( "value" ).addText("simple").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("urls./**")
+        .addTag( "value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag("role").addText("identity-assertion")
+        .addTag("enabled").addText("true")
+        .addTag("name").addText("Default").gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag("name").addText("AclsAuthz").gotoParent()
+        .addTag("param")
+        .addTag("name").addText( "webhdfs-acl" )
+        .addTag("value").addText("hdfs;*;*").gotoParent()
+        .gotoRoot()
+        .addTag("service")
+        .addTag("role").addText("WEBHDFS")
+        .addTag("url").addText("http://localhost:" + nameNodeHttpPort + "/webhdfs/").gotoParent()
+        .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-release/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif b/gateway-test-release/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
new file mode 100644
index 0000000..d82e99a
--- /dev/null
+++ b/gateway-test-release/src/test/resources/org/apache/hadoop/gateway/SecureClusterTest/users.ldif
@@ -0,0 +1,61 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: LarryWalls
+sn: Walls
+uid: hdfs
+userPassword:hdfs-password
+
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: Guest
+uid: guest
+userPassword:guest-password
+
+dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:groupOfNames
+cn: admin
+member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FunctionalTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FunctionalTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FunctionalTests.java
deleted file mode 100644
index c532b63..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/FunctionalTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface FunctionalTests {
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/IntegrationTests.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/IntegrationTests.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/IntegrationTests.java
deleted file mode 100644
index 36de9e5..0000000
--- a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/IntegrationTests.java
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.test.category;
-
-public interface IntegrationTests {
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
new file mode 100644
index 0000000..bd52807
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/ReleaseTest.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test.category;
+
+public interface ReleaseTest {
+}
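
ReleaseTest is a plain marker interface used as a JUnit category. A minimal sketch of how a test opts into the category (the test class here is hypothetical; SecureClusterTest above is the real example in this commit):

    import org.apache.hadoop.test.category.ReleaseTest;
    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    // Hypothetical test class, shown only to illustrate the category annotation.
    @Category( ReleaseTest.class )
    public class ExampleReleaseTest {
      @Test
      public void smoke() {
        // release-level assertions go here
      }
    }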

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
new file mode 100644
index 0000000..0b0acaa
--- /dev/null
+++ b/gateway-test-utils/src/main/java/org/apache/hadoop/test/category/VerifyTest.java
@@ -0,0 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test.category;
+
+public interface VerifyTest {
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 8a96ee3..aa07076 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -27,7 +27,7 @@ import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
 import org.apache.hadoop.gateway.util.KnoxCLI;
 import org.apache.hadoop.test.TestUtils;
-import org.apache.hadoop.test.category.FunctionalTests;
+import org.apache.hadoop.test.category.VerifyTest;
 import org.apache.hadoop.test.category.MediumTests;
 import org.apache.hadoop.test.mock.MockRequestMatcher;
 import org.apache.http.HttpStatus;
@@ -81,7 +81,7 @@ import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
 import static org.xmlmatchers.transform.XmlConverters.the;
 import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
 
-@Category( { FunctionalTests.class, MediumTests.class } )
+@Category( { VerifyTest.class, MediumTests.class } )
 public class GatewayBasicFuncTest {
 
   private static final long SHORT_TIMEOUT = 1000L;

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
index b4965c8..81c45e1 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayFuncTestDriver.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.gateway;
 
+import com.jayway.restassured.RestAssured;
+import com.jayway.restassured.path.json.JsonPath;
 import com.jayway.restassured.response.Response;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.FileUtils;
@@ -44,6 +46,7 @@ import org.apache.http.impl.client.BasicAuthCache;
 import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.http.protocol.BasicHttpContext;
 import org.apache.http.util.EntityUtils;
+import org.hamcrest.CoreMatchers;
 import org.hamcrest.MatcherAssert;
 import org.hamcrest.Matchers;
 import org.slf4j.Logger;
@@ -63,14 +66,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
-import static com.jayway.restassured.path.json.JsonPath.from;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.notNullValue;
-import static org.hamcrest.Matchers.hasItems;
-import static org.hamcrest.Matchers.is;
-import static org.hamcrest.Matchers.isIn;
-import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 
@@ -174,7 +169,7 @@ public class GatewayFuncTestDriver {
     }
 
     gateway = GatewayServer.startGateway( config, srvcs );
-    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, CoreMatchers.notNullValue() );
 
     log.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
   }
@@ -244,6 +239,10 @@ public class GatewayFuncTestDriver {
     return url;
   }
 
+  public int getGatewayPort() {
+    return gateway.getAddresses()[0].getPort();
+  }
+
   public String getRealAddr( String role ) {
     String addr;
     String localHostName = getLocalHostName();
@@ -319,7 +318,7 @@ public class GatewayFuncTestDriver {
       if( service.mock ) {
         assertThat(
             "Service " + service.role + " has remaining expected interactions.",
-            service.server.getCount(), is( 0 ) );
+            service.server.getCount(), Matchers.is(0) );
       }
       service.server.reset();
     }
@@ -336,7 +335,7 @@ public class GatewayFuncTestDriver {
       if(service.mock) {
         assertThat(
             "Service " + service.role + " has remaining expected interactions.",
-            service.server.getCount(), not(0));
+            service.server.getCount(), Matchers.not(0));
       }
       service.server.reset();
     } else {
@@ -371,7 +370,7 @@ public class GatewayFuncTestDriver {
           .respond()
           .status( status );
     }
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().headers()
         //.log().parameters()
         .auth().preemptive().basic( user, password )
@@ -412,7 +411,7 @@ public class GatewayFuncTestDriver {
           .respond()
           .status( status );
     }
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -473,7 +472,7 @@ public class GatewayFuncTestDriver {
           .respond()
           .status( status );
     }
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -485,7 +484,7 @@ public class GatewayFuncTestDriver {
     if( response.getStatusCode() == HttpStatus.SC_OK ) {
       String actualContent = response.asString();
       String expectedContent = getResourceString( resource, Charset.forName("UTF-8") );
-      assertThat( actualContent, is( expectedContent ) );
+      assertThat( actualContent, Matchers.is(expectedContent) );
     }
     assertComplete();
   }
@@ -501,7 +500,7 @@ public class GatewayFuncTestDriver {
         .queryParam( "group", group )
         .respond()
         .status( HttpStatus.SC_OK );
-    given()
+    RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -525,7 +524,7 @@ public class GatewayFuncTestDriver {
         .queryParam( "permission", permsOctal )
         .respond()
         .status( HttpStatus.SC_OK );
-    given()
+    RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -570,7 +569,7 @@ public class GatewayFuncTestDriver {
           .respond()
           .status( status );
     }
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -611,7 +610,7 @@ public class GatewayFuncTestDriver {
           .respond()
           .status( status );
     }
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -634,7 +633,7 @@ public class GatewayFuncTestDriver {
         .queryParam( "op", "DELETE" )
         .queryParam( "recursive", recursive )
         .respond().status( status[0] );
-    given()
+    RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -642,7 +641,7 @@ public class GatewayFuncTestDriver {
         .queryParam( "recursive", recursive )
         .expect()
         //.log().all()
-        .statusCode( isIn( ArrayUtils.toObject( status ) ) )
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
         .when()
         .delete( getUrl( "WEBHDFS" ) + "/v1" + file + ( isUseGateway() ? "" : "?user.name=" + user ) );
     assertComplete();
@@ -660,7 +659,7 @@ public class GatewayFuncTestDriver {
         .status( HttpStatus.SC_OK )
         .contentType( "application/json" )
         .content( "{\"boolean\": true}".getBytes() );
-    Response response = given()
+    Response response = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -670,7 +669,7 @@ public class GatewayFuncTestDriver {
         //.log().all()
         .statusCode( status )
         .contentType( "application/json" )
-        .content( "boolean", equalTo( true ) )
+        .content( "boolean", CoreMatchers.equalTo(true) )
         .when()
         .put( getUrl("WEBHDFS") + "/v1" + dir + ( isUseGateway() ? "" : "?user.name=" + user ) );
     String location = response.getHeader( "Location" );
@@ -686,14 +685,14 @@ public class GatewayFuncTestDriver {
   }
 
   public void readDir( String user, String password, String dir, String resource, int status ) {
-    given()
+    RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .queryParam( "op", "LISTSTATUS" )
         .expect()
         //.log().all()
         .statusCode( status )
-        .content( equalTo( "TODO" ) )
+        .content( CoreMatchers.equalTo("TODO") )
         .when()
         .get( getUrl( "WEBHDFS" ) + "/v1" + dir );
   }
@@ -711,7 +710,7 @@ public class GatewayFuncTestDriver {
         .status( status )
         .contentType( "application/json" )
         .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
-    String json = given()
+    String json = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -724,7 +723,7 @@ public class GatewayFuncTestDriver {
         .statusCode( status )
         .when().post( getUrl( "WEBHCAT" ) + "/v1/mapreduce/jar" + ( isUseGateway() ? "" : "?user.name=" + user ) ).asString();
     log.trace( "JSON=" + json );
-    String job = from( json ).getString( "id" );
+    String job = JsonPath.from(json).getString( "id" );
     log.debug( "JOB=" + job );
     assertComplete();
     return job;
@@ -739,7 +738,7 @@ public class GatewayFuncTestDriver {
         .status( status[0] )
         .contentType( "application/json" )
         .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
-    String json = given()
+    String json = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -751,14 +750,14 @@ public class GatewayFuncTestDriver {
         .formParam( "statusdir", statusDir )
         .expect()
         //.log().all();
-        .statusCode( isIn( ArrayUtils.toObject( status ) ) )
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
         .contentType( "application/json" )
         //.content( "boolean", equalTo( true ) )
         .when()
         .post( getUrl( "WEBHCAT" ) + "/v1/pig" + ( isUseGateway() ? "" : "?user.name=" + user ) )
         .asString();
     log.trace( "JSON=" + json );
-    String job = from( json ).getString( "id" );
+    String job = JsonPath.from(json).getString( "id" );
     log.debug( "JOB=" + job );
     assertComplete();
     return job;
@@ -773,7 +772,7 @@ public class GatewayFuncTestDriver {
         .status( status[ 0 ] )
         .contentType( "application/json" )
         .content( "{\"id\":\"job_201210301335_0086\"}".getBytes() );
-    String json = given()
+    String json = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
@@ -784,14 +783,14 @@ public class GatewayFuncTestDriver {
         .formParam( "statusdir", statusDir )
         .expect()
         //.log().all()
-        .statusCode( isIn( ArrayUtils.toObject( status ) ) )
+        .statusCode( Matchers.isIn(ArrayUtils.toObject(status)) )
         .contentType( "application/json" )
         //.content( "boolean", equalTo( true ) )
         .when()
         .post( getUrl( "WEBHCAT" ) + "/v1/hive" + ( isUseGateway() ? "" : "?user.name=" + user ) )
         .asString();
     log.trace( "JSON=" + json );
-    String job = from( json ).getString( "id" );
+    String job = JsonPath.from(json).getString( "id" );
     log.debug( "JOB=" + job );
     assertComplete();
     return job;
@@ -806,14 +805,14 @@ public class GatewayFuncTestDriver {
           .status( HttpStatus.SC_OK )
           .content( getResourceBytes( "webhcat-job-status.json" ) )
           .contentType( "application/json" );
-    String status = given()
+    String status = RestAssured.given()
         //.log().all()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
         .pathParam( "job", job )
         .expect()
         //.log().all()
-        .content( "status.jobId", equalTo( job ) )
+        .content( "status.jobId", CoreMatchers.equalTo(job) )
         .statusCode( HttpStatus.SC_OK )
         .when().get( getUrl( "WEBHCAT" ) + "/v1/jobs/{job}" + ( isUseGateway() ? "" : "?user.name=" + user ) ).asString();
     log.debug( "STATUS=" + status );
@@ -829,12 +828,12 @@ public class GatewayFuncTestDriver {
   See: oozie-versions.json
   */
   public void oozieGetVersions( String user, String password ) throws IOException {
-    given()
+    RestAssured.given()
         .auth().preemptive().basic( user, password )
         .header( "X-XSRF-Header", "jksdhfkhdsf" )
         .expect()
         .statusCode( 200 )
-        .body( "", hasItems( 0, 1 ) )
+        .body( "", Matchers.hasItems(0, 1) )
         .when().get( getUrl( "OOZIE" ) + "/versions" + ( isUseGateway() ? "" : "?user.name=" + user ) ).asString();
   }
 
@@ -935,7 +934,7 @@ TODO
     post.setEntity( entity );
     post.setHeader( "X-XSRF-Header", "ksdjfhdsjkfhds" );
     HttpResponse response = client.execute( targetHost, post, localContext );
-    assertThat( response.getStatusLine().getStatusCode(), is( status ) );
+    assertThat( response.getStatusLine().getStatusCode(), Matchers.is(status) );
     String json = EntityUtils.toString( response.getEntity() );
 
 //    String json = given()
@@ -949,7 +948,7 @@ TODO
 //        .statusCode( status )
 //        .when().post( getUrl( "OOZIE" ) + "/v1/jobs" + ( isUseGateway() ? "" : "?user.name=" + user ) ).asString();
     //System.out.println( "JSON=" + json );
-    String id = from( json ).getString( "id" );
+    String id = JsonPath.from(json).getString( "id" );
     return id;
   }
 
@@ -1022,9 +1021,9 @@ TODO
     HttpGet request = new HttpGet( url.toURI() );
     request.setHeader("X-XSRF-Header", "ksdhfjkhdsjkf");
     HttpResponse response = client.execute( targetHost, request, localContext );
-    assertThat( response.getStatusLine().getStatusCode(), is( status ) );
+    assertThat( response.getStatusLine().getStatusCode(), Matchers.is(status) );
     String json = EntityUtils.toString( response.getEntity() );
-    String jobStatus = from( json ).getString( "status" );
+    String jobStatus = JsonPath.from(json).getString( "status" );
     return jobStatus;
   }
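
The GatewayFuncTestDriver hunks above all make the same mechanical change: the
statically imported helpers given(), from(), isIn(), equalTo() and hasItems()
become fully qualified calls on RestAssured, JsonPath, Matchers and CoreMatchers.
A minimal sketch of the resulting style is below, assuming the rest-assured 2.x
(com.jayway.restassured) and Hamcrest coordinates already on the test classpath;
the class, helper names, URL and header value are placeholders rather than Knox
code.

    import org.hamcrest.CoreMatchers;
    import org.hamcrest.Matchers;

    import com.jayway.restassured.RestAssured;
    import com.jayway.restassured.path.json.JsonPath;
    import com.jayway.restassured.response.Response;

    public class QualifiedRestAssuredSketch {

      // Placeholder helper mirroring the WebHDFS MKDIRS calls in the diff: every
      // REST Assured / Hamcrest entry point is spelled out instead of statically
      // imported.
      public Response mkdirs( String url, String user, String password, Integer... allowed ) {
        return RestAssured.given()
            .auth().preemptive().basic( user, password )
            .header( "X-XSRF-Header", "jksdhfkhdsf" )
            .queryParam( "op", "MKDIRS" )
            .expect()
            .statusCode( Matchers.isIn( allowed ) )
            .contentType( "application/json" )
            .content( "boolean", CoreMatchers.equalTo( true ) )
            .when()
            .put( url );
      }

      // JsonPath.from(...) replaces the statically imported from(...).
      public String jobId( String json ) {
        return JsonPath.from( json ).getString( "id" );
      }
    }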
 

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
index b8d520f..186e585 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.gateway.services.DefaultGatewayServices;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.ServiceLifecycleException;
 import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.test.category.FunctionalTests;
+import org.apache.hadoop.test.category.VerifyTest;
 import org.apache.http.HttpStatus;
 import org.apache.log4j.Appender;
 import org.hamcrest.MatcherAssert;
@@ -63,7 +63,7 @@ import static org.junit.Assert.fail;
  * and using them in acl authorization checks
  *
  */
-@Category(FunctionalTests.class)
+@Category(VerifyTest.class)
 public class GatewayLdapPosixGroupFuncTest {
 
   private static final long SHORT_TIMEOUT = 2000L;

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
index 7823978..fab2429 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
@@ -19,12 +19,10 @@ package org.apache.hadoop.gateway;
 
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
-import org.apache.hadoop.test.category.FunctionalTests;
+import org.apache.hadoop.test.category.VerifyTest;
 import org.apache.hadoop.test.category.MediumTests;
-import org.apache.hadoop.test.log.NoOpLogger;
 import org.apache.hadoop.test.mock.MockServer;
 import org.apache.http.HttpStatus;
-import org.eclipse.jetty.util.log.Log;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
@@ -39,7 +37,7 @@ import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;
 
-@Category({FunctionalTests.class, MediumTests.class})
+@Category({VerifyTest.class, MediumTests.class})
 public class WebHdfsHaFuncTest {
 
    private static final long SHORT_TIMEOUT = 1000L;
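
Both test classes in the two preceding file diffs swap the old FunctionalTests
category for the new VerifyTest category. The committed VerifyTest.java and
ReleaseTest.java sources are not shown in this excerpt, but JUnit categories are
plain marker types, so a sketch of what they presumably contain (contents assumed,
not copied from the commit) is simply:

    package org.apache.hadoop.test.category;

    // Marker interface used purely for test classification: JUnit's @Category
    // annotation and the surefire/failsafe <groups>/<excludedGroups> settings
    // match on this type; it declares no methods.
    public interface VerifyTest {
    }

ReleaseTest would be the analogous empty marker in the same package.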

http://git-wip-us.apache.org/repos/asf/knox/blob/e6a7cb40/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 4de7214..174822f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -85,6 +85,7 @@
         <module>hsso-release</module>
         <module>gateway-service-vault</module>
         <module>gateway-service-test</module>
+        <module>gateway-test-release</module>
     </modules>
 
     <properties>
@@ -171,6 +172,18 @@
                     </plugin>
                 </plugins>
             </build>
+            <properties>
+                <failsafe.group>org.apache.hadoop.test.category.VerifyTest, org.apache.hadoop.test.category.ReleaseTest</failsafe.group>
+            </properties>
+        </profile>
+        <profile>
+            <id>dev</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <properties>
+                <failsafe.group>org.apache.hadoop.test.category.VerifyTest</failsafe.group>
+            </properties>
         </profile>
     </profiles>
 
@@ -260,13 +273,32 @@
                 <version>2.16</version>
                 <configuration>
                     <excludedGroups>
-                        org.apache.hadoop.test.category.SlowTests,org.apache.hadoop.test.category.ManualTests,org.apache.hadoop.test.category.IntegrationTests
+                        org.apache.hadoop.test.category.SlowTests,org.apache.hadoop.test.category.ManualTests,org.apache.hadoop.test.category.VerifyTest,org.apache.hadoop.test.category.ReleaseTest
                     </excludedGroups>
                     <systemPropertyVariables>
                         <gateway-version>${gateway-version}</gateway-version>
                     </systemPropertyVariables>
                 </configuration>
             </plugin>
+            <plugin>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>2.19.1</version>
+                <configuration>
+                    <groups>${failsafe.group}</groups>
+                </configuration>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>integration-test</goal>
+                        </goals>
+                        <configuration>
+                            <includes>
+                                <include>**/*.class</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
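
Taken together, the pom.xml changes split test execution by JUnit category: surefire
now excludes the VerifyTest and ReleaseTest groups alongside the slow and manual ones,
while the new failsafe execution runs whatever ${failsafe.group} resolves to in the
active profile (only VerifyTest in the default dev profile, VerifyTest plus ReleaseTest
in the profile amended above). A test opts in simply by carrying the matching category
annotation. The class below is a hypothetical illustration of that wiring, not code
from the commit:

    import org.junit.Test;
    import org.junit.experimental.categories.Category;

    import org.apache.hadoop.test.category.ReleaseTest;

    // Hypothetical release-level test: because it is annotated with ReleaseTest,
    // the failsafe configuration above selects it only when the active profile's
    // failsafe.group property includes org.apache.hadoop.test.category.ReleaseTest.
    @Category( ReleaseTest.class )
    public class ExampleReleaseIT {

      @Test
      public void isOnlyRunByFailsafeWhenTheReleaseGroupIsActive() {
        // intentionally trivial
      }
    }

So a plain "mvn verify" build would run the VerifyTest-categorized integration tests,
and enabling the amended profile would additionally pull in the ReleaseTest-categorized
ones, presumably those added in the new gateway-test-release module.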