Posted to commits@knox.apache.org by km...@apache.org on 2015/11/09 22:10:49 UTC

[1/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Re-enabled remainder of func tests.

Repository: knox
Updated Branches:
  refs/heads/master 2bb8d1e0d -> 56cedc0ae


http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
index 1e5a59a..a23e675 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/deploy/DeploymentFactoryFuncTest.java
@@ -70,464 +70,464 @@ import static org.junit.Assert.fail;
 
 public class DeploymentFactoryFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
-//  private static final long LONG_TIMEOUT = 10 * MEDIUM_TIMEOUT;
-//
-//  @Test( timeout = SHORT_TIMEOUT )
-//  public void testGenericProviderDeploymentContributor() throws ParserConfigurationException, SAXException, IOException, TransformerException {
-//    LOG_ENTER();
-//    GatewayConfig config = new GatewayTestConfig();
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File deployDir = new File( config.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-////    ((GatewayTestConfig) config).setDeploymentDir( "clusters" );
-//
-//    addStacksDir(config, targetDir);
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put("persist-master", "false");
-//    options.put("master", "password");
-//    try {
-//      DeploymentFactory.setGatewayServices(srvcs);
-//      srvcs.init(config, options);
-//    } catch (ServiceLifecycleException e) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    Topology topology = new Topology();
-//    topology.setName( "test-cluster" );
-//    Service service = new Service();
-//    service.setRole( "WEBHDFS" );
-//    service.addUrl( "http://localhost:50070/test-service-url" );
-//    topology.addService( service );
-//
-//    Provider provider = new Provider();
-//    provider.setRole( "authentication" );
-//    provider.setName( "generic" );
-//    provider.setEnabled( true );
-//    Param param = new Param();
-//    param.setName( "filter" );
-//    param.setValue( "org.opensource.ExistingFilter" );
-//    provider.addParam( param );
-//    param = new Param();
-//    param.setName( "test-param-name" );
-//    param.setValue( "test-param-value" );
-//    provider.addParam( param );
-//    topology.addProvider( provider );
-//
-//    WebArchive war = DeploymentFactory.createDeployment( config, topology );
-//
-//    Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
-//    //dump( gateway );
-//
-//    //by default the first filter will be the X-Forwarded header filter
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/role", equalTo( "xforwardedheaders" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/name", equalTo( "XForwardedHeaderFilter" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.XForwardedHeaderFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/name", equalTo( "generic" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/class", equalTo( "org.opensource.ExistingFilter" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/param[1]/name", equalTo( "test-param-name" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/param[1]/value", equalTo( "test-param-value" ) ) );
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testInvalidGenericProviderDeploymentContributor() throws ParserConfigurationException, SAXException, IOException, TransformerException {
-//    LOG_ENTER();
-//    GatewayConfig config = new GatewayTestConfig();
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//    File deployDir = new File( config.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//    addStacksDir(config, targetDir);
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put("persist-master", "false");
-//    options.put("master", "password");
-//    try {
-//      DeploymentFactory.setGatewayServices(srvcs);
-//      srvcs.init(config, options);
-//    } catch (ServiceLifecycleException e) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    Topology topology = new Topology();
-//    topology.setName( "test-cluster" );
-//    Service service = new Service();
-//    service.setRole( "WEBHDFS" );
-//    service.addUrl( "http://localhost:50070/test-service-url" );
-//    topology.addService( service );
-//
-//    Provider provider = new Provider();
-//    provider.setRole( "authentication" );
-//    provider.setName( "generic" );
-//    provider.setEnabled( true );
-//    Param param; // = new ProviderParam();
-//    // Missing filter param.
-//    //param.setName( "filter" );
-//    //param.setValue( "org.opensource.ExistingFilter" );
-//    //provider.addParam( param );
-//    param = new Param();
-//    param.setName( "test-param-name" );
-//    param.setValue( "test-param-value" );
-//    provider.addParam( param );
-//    topology.addProvider( provider );
-//
-//    Enumeration<Appender> appenders = NoOpAppender.setUp();
-//    try {
-//      DeploymentFactory.createDeployment( config, topology );
-//      fail( "Should have thrown IllegalArgumentException" );
-//    } catch ( DeploymentException e ) {
-//      // Expected.
-//    } finally {
-//      NoOpAppender.tearDown( appenders );
-//    }
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
-//    LOG_ENTER();
-//    GatewayConfig config = new GatewayTestConfig();
-//    //Testing without x-forwarded headers filter
-//    ((GatewayTestConfig)config).setXForwardedEnabled(false);
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//    File deployDir = new File( config.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//    addStacksDir(config, targetDir);
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put("persist-master", "false");
-//    options.put("master", "password");
-//    try {
-//      DeploymentFactory.setGatewayServices(srvcs);
-//      srvcs.init(config, options);
-//    } catch (ServiceLifecycleException e) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    Topology topology = new Topology();
-//    topology.setName( "test-cluster" );
-//    Service service = new Service();
-//    service.setRole( "WEBHDFS" );
-//    service.addUrl( "http://localhost:50070/webhdfs" );
-//    topology.addService( service );
-//    Provider provider = new Provider();
-//    provider.setRole( "authentication" );
-//    provider.setName( "ShiroProvider" );
-//    provider.setEnabled( true );
-//    Param param = new Param();
-//    param.setName( "contextConfigLocation" );
-//    param.setValue( "classpath:app-context-security.xml" );
-//    provider.addParam( param );
-//    topology.addProvider( provider );
-//    Provider asserter = new Provider();
-//    asserter.setRole( "identity-assertion" );
-//    asserter.setName("Default");
-//    asserter.setEnabled( true );
-//    topology.addProvider( asserter );
-//    Provider authorizer = new Provider();
-//    authorizer.setRole( "authorization" );
-//    authorizer.setName("AclsAuthz");
-//    authorizer.setEnabled( true );
-//    topology.addProvider( authorizer );
-//
-//    WebArchive war = DeploymentFactory.createDeployment( config, topology );
-////    File dir = new File( System.getProperty( "user.dir" ) );
-////    File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
-//
-//    Document web = parse( war.get( "WEB-INF/web.xml" ).getAsset().openStream() );
-//    assertThat( web, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster" ) ) );
-//    assertThat( web, hasXPath( "/web-app/servlet/servlet-class", equalTo( "org.apache.hadoop.gateway.GatewayServlet" ) ) );
-//    assertThat( web, hasXPath( "/web-app/servlet/init-param/param-name", equalTo( "gatewayDescriptorLocation" ) ) );
-//    assertThat( web, hasXPath( "/web-app/servlet/init-param/param-value", equalTo( "gateway.xml" ) ) );
-//    assertThat( web, hasXPath( "/web-app/servlet-mapping/servlet-name", equalTo( "test-cluster" ) ) );
-//    assertThat( web, hasXPath( "/web-app/servlet-mapping/url-pattern", equalTo( "/*" ) ) );
-//
-//    Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/pattern", equalTo( "/webhdfs/v1/?**" ) ) );
-//    //assertThat( gateway, hasXPath( "/gateway/resource[1]/target", equalTo( "http://localhost:50070/webhdfs/v1/?{**}" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.ResponseCookieFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/class", equalTo( "org.apache.shiro.web.servlet.ShiroFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[3]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[3]/class", equalTo( "org.apache.hadoop.gateway.filter.ShiroSubjectIdentityAdapter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[4]/role", equalTo( "rewrite" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[4]/class", equalTo( "org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[5]/role", equalTo( "identity-assertion" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[5]/class", equalTo( "org.apache.hadoop.gateway.identityasserter.filter.IdentityAsserterFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/role", equalTo( "authorization" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/name", equalTo( "AclsAuthz" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/role", equalTo( "dispatch" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/name", equalTo( "webhdfs" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/pattern", equalTo( "/webhdfs/v1/**?**" ) ) );
-//    //assertThat( gateway, hasXPath( "/gateway/resource[2]/target", equalTo( "http://localhost:50070/webhdfs/v1/{path=**}?{**}" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[1]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.ResponseCookieFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[2]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[2]/class", equalTo( "org.apache.shiro.web.servlet.ShiroFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[3]/role", equalTo( "authentication" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[3]/class", equalTo( "org.apache.hadoop.gateway.filter.ShiroSubjectIdentityAdapter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[4]/role", equalTo( "rewrite" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[4]/class", equalTo( "org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[5]/role", equalTo( "identity-assertion" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[5]/class", equalTo( "org.apache.hadoop.gateway.identityasserter.filter.IdentityAsserterFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/role", equalTo( "authorization" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/name", equalTo( "AclsAuthz" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
-//
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/role", equalTo( "dispatch" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/name", equalTo( "webhdfs" ) ) );
-//    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
-//
-//    LOG_EXIT();
-//  }
-//
-//
-//   @Test( timeout = LONG_TIMEOUT )
-//   public void testWebXmlGeneration() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
-//      LOG_ENTER();
-//      GatewayConfig config = new GatewayTestConfig();
-//      File targetDir = new File(System.getProperty("user.dir"), "target");
-//      File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
-//      gatewayDir.mkdirs();
-//      ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
-//      File deployDir = new File(config.getGatewayDeploymentDir());
-//      deployDir.mkdirs();
-//
-//      DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//      Map<String, String> options = new HashMap<String, String>();
-//      options.put("persist-master", "false");
-//      options.put("master", "password");
-//      try {
-//         DeploymentFactory.setGatewayServices(srvcs);
-//         srvcs.init(config, options);
-//      } catch (ServiceLifecycleException e) {
-//         e.printStackTrace(); // I18N not required.
-//      }
-//
-//      Topology topology = new Topology();
-//      topology.setName("test-cluster");
-//      Service service = new Service();
-//      service.setRole("WEBHDFS");
-//      service.addUrl("http://localhost:50070/webhdfs");
-//      topology.addService(service);
-//      Provider provider = new Provider();
-//      provider.setRole("authentication");
-//      provider.setName("ShiroProvider");
-//      provider.setEnabled(true);
-//      Param param = new Param();
-//      param.setName("contextConfigLocation");
-//      param.setValue("classpath:app-context-security.xml");
-//      provider.addParam(param);
-//      topology.addProvider(provider);
-//      Provider asserter = new Provider();
-//      asserter.setRole("identity-assertion");
-//      asserter.setName("Default");
-//      asserter.setEnabled(true);
-//      topology.addProvider(asserter);
-//      Provider authorizer = new Provider();
-//      authorizer.setRole("authorization");
-//      authorizer.setName("AclsAuthz");
-//      authorizer.setEnabled(true);
-//      topology.addProvider(authorizer);
-//      Provider ha = new Provider();
-//      ha.setRole("ha");
-//      ha.setName("HaProvider");
-//      ha.setEnabled(true);
-//      topology.addProvider(ha);
-//
-//      for (int i = 0; i < 100; i++) {
-//         createAndTestDeployment(config, topology);
-//      }
-//      LOG_EXIT();
-//   }
-//
-//   private void createAndTestDeployment(GatewayConfig config, Topology topology) throws IOException, SAXException, ParserConfigurationException {
-//
-//      WebArchive war = DeploymentFactory.createDeployment(config, topology);
-////      File dir = new File( System.getProperty( "user.dir" ) );
-////      File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
-//
-//      Document web = parse(war.get("WEB-INF/web.xml").getAsset().openStream());
-//      assertThat(web, hasXPath("/web-app/servlet/servlet-class", equalTo("org.apache.hadoop.gateway.GatewayServlet")));
-//      assertThat(web, hasXPath("/web-app/servlet/init-param/param-name", equalTo("gatewayDescriptorLocation")));
-//      assertThat(web, hasXPath("/web-app/servlet/init-param/param-value", equalTo("gateway.xml")));
-//      assertThat(web, hasXPath("/web-app/servlet-mapping/servlet-name", equalTo("test-cluster")));
-//      assertThat(web, hasXPath("/web-app/servlet-mapping/url-pattern", equalTo("/*")));
-//      //testing the order of listener classes generated
-//      assertThat(web, hasXPath("/web-app/listener[2]/listener-class", equalTo("org.apache.hadoop.gateway.services.GatewayServicesContextListener")));
-//      assertThat(web, hasXPath("/web-app/listener[3]/listener-class", equalTo("org.apache.hadoop.gateway.ha.provider.HaServletContextListener")));
-//      assertThat(web, hasXPath("/web-app/listener[4]/listener-class", equalTo("org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletContextListener")));
-//   }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testDeploymentWithServiceParams() throws Exception {
-//    LOG_ENTER();
-//    GatewayConfig config = new GatewayTestConfig();
-//    File targetDir = new File(System.getProperty("user.dir"), "target");
-//    File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
-//    gatewayDir.mkdirs();
-//    ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
-//    File deployDir = new File(config.getGatewayDeploymentDir());
-//    deployDir.mkdirs();
-//    addStacksDir(config, targetDir);
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String, String> options = new HashMap<String, String>();
-//    options.put("persist-master", "false");
-//    options.put("master", "password");
-//    try {
-//      DeploymentFactory.setGatewayServices(srvcs);
-//      srvcs.init(config, options);
-//    } catch (ServiceLifecycleException e) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    Service service;
-//    Param param;
-//    Topology topology = new Topology();
-//    topology.setName( "test-cluster" );
-//
-//    service = new Service();
-//    service.setRole( "HIVE" );
-//    service.setUrls( Arrays.asList( new String[]{ "http://hive-host:50001/" } ) );
-//    param = new Param();
-//    param.setName( "someparam" );
-//    param.setValue( "somevalue" );
-//    service.addParam( param );
-//    topology.addService( service );
-//
-//    service = new Service();
-//    service.setRole( "WEBHBASE" );
-//    service.setUrls( Arrays.asList( new String[]{ "http://hbase-host:50002/" } ) );
-//    param = new Param();
-//    param.setName( "replayBufferSize" );
-//    param.setValue( "33" );
-//    service.addParam( param );
-//    topology.addService( service );
-//
-//    service = new Service();
-//    service.setRole( "OOZIE" );
-//    service.setUrls( Arrays.asList( new String[]{ "http://hbase-host:50003/" } ) );
-//    param = new Param();
-//    param.setName( "otherparam" );
-//    param.setValue( "65" );
-//    service.addParam( param );
-//    topology.addService( service );
-//
-//    WebArchive war = DeploymentFactory.createDeployment( config, topology );
-//    Document doc = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
-////    dump( doc );
-//
-//    Node resourceNode, filterNode, paramNode;
-//    String value;
-//
-//    resourceNode = node( doc, "gateway/resource[role/text()='HIVE']" );
-//    assertThat( resourceNode, is(not(nullValue())));
-//    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
-//    assertThat( filterNode, is(not(nullValue())));
-//    paramNode = node( filterNode, "param[name/text()='someparam']" );
-//    value = value( paramNode, "value/text()" );
-//    assertThat( value, is( "somevalue" ) ) ;
-//
-//    resourceNode = node( doc, "gateway/resource[role/text()='WEBHBASE']" );
-//    assertThat( resourceNode, is(not(nullValue())));
-//    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
-//    assertThat( filterNode, is(not(nullValue())));
-//    paramNode = node( filterNode, "param[name/text()='replayBufferSize']" );
-//    value = value( paramNode, "value/text()" );
-//    assertThat( value, is( "33" ) ) ;
-//
-//    resourceNode = node( doc, "gateway/resource[role/text()='OOZIE']" );
-//    assertThat( resourceNode, is(not(nullValue())));
-//    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
-//    assertThat( filterNode, is(not(nullValue())));
-//    paramNode = node( filterNode, "param[name/text()='otherparam']" );
-//    value = value( paramNode, "value/text()" );
-//    assertThat( value, is( "65" ) ) ;
-//
-//    FileUtils.deleteQuietly( deployDir );
-//
-//    LOG_EXIT();
-//  }
-//
-//  private Document parse( InputStream stream ) throws IOException, SAXException, ParserConfigurationException {
-//    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
-//    DocumentBuilder builder = factory.newDocumentBuilder();
-//    InputSource source = new InputSource( stream );
-//    return builder.parse( source );
-//  }
-//
-//  private void addStacksDir(GatewayConfig config, File targetDir) {
-//    File stacksDir = new File( config.getGatewayServicesDir() );
-//    stacksDir.mkdirs();
-//    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
-//    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
-//    File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
-//    if (!stacksSourceDir.exists()) {
-//      stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
-//    }
-//    if (stacksSourceDir.exists()) {
-//      try {
-//        FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
-//      } catch ( IOException e) {
-//        fail(e.getMessage());
-//      }
-//    }
-//
-//  }
-//
-//  private void dump( Document document ) throws TransformerException {
-//    Transformer transformer = TransformerFactory.newInstance().newTransformer();
-//    transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
-//    StreamResult result = new StreamResult( new StringWriter() );
-//    DOMSource source = new DOMSource( document );
-//    transformer.transform( source, result );
-//    String xmlString = result.getWriter().toString();
-//    System.out.println( xmlString );
-//  }
-//
-//  private Node node( Node scope, String expression ) throws XPathExpressionException {
-//    return (Node)XPathFactory.newInstance().newXPath().compile( expression ).evaluate( scope, XPathConstants.NODE );
-//  }
-//
-//  private String value( Node scope, String expression ) throws XPathExpressionException {
-//    return XPathFactory.newInstance().newXPath().compile( expression ).evaluate( scope );
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
+  private static final long LONG_TIMEOUT = 10 * MEDIUM_TIMEOUT;
+
+  @Test( timeout = SHORT_TIMEOUT )
+  public void testGenericProviderDeploymentContributor() throws ParserConfigurationException, SAXException, IOException, TransformerException {
+    LOG_ENTER();
+    GatewayConfig config = new GatewayTestConfig();
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+//    ((GatewayTestConfig) config).setDeploymentDir( "clusters" );
+
+    addStacksDir(config, targetDir);
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      DeploymentFactory.setGatewayServices(srvcs);
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    Topology topology = new Topology();
+    topology.setName( "test-cluster" );
+    Service service = new Service();
+    service.setRole( "WEBHDFS" );
+    service.addUrl( "http://localhost:50070/test-service-url" );
+    topology.addService( service );
+
+    Provider provider = new Provider();
+    provider.setRole( "authentication" );
+    provider.setName( "generic" );
+    provider.setEnabled( true );
+    Param param = new Param();
+    param.setName( "filter" );
+    param.setValue( "org.opensource.ExistingFilter" );
+    provider.addParam( param );
+    param = new Param();
+    param.setName( "test-param-name" );
+    param.setValue( "test-param-value" );
+    provider.addParam( param );
+    topology.addProvider( provider );
+
+    WebArchive war = DeploymentFactory.createDeployment( config, topology );
+
+    Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+    //dump( gateway );
+
+    //by default the first filter will be the X-Forwarded header filter
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/role", equalTo( "xforwardedheaders" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/name", equalTo( "XForwardedHeaderFilter" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.XForwardedHeaderFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/name", equalTo( "generic" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/class", equalTo( "org.opensource.ExistingFilter" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/param[1]/name", equalTo( "test-param-name" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/param[1]/value", equalTo( "test-param-value" ) ) );
+    LOG_EXIT();
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testInvalidGenericProviderDeploymentContributor() throws ParserConfigurationException, SAXException, IOException, TransformerException {
+    LOG_ENTER();
+    GatewayConfig config = new GatewayTestConfig();
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+    addStacksDir(config, targetDir);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      DeploymentFactory.setGatewayServices(srvcs);
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    Topology topology = new Topology();
+    topology.setName( "test-cluster" );
+    Service service = new Service();
+    service.setRole( "WEBHDFS" );
+    service.addUrl( "http://localhost:50070/test-service-url" );
+    topology.addService( service );
+
+    Provider provider = new Provider();
+    provider.setRole( "authentication" );
+    provider.setName( "generic" );
+    provider.setEnabled( true );
+    Param param; // = new ProviderParam();
+    // Missing filter param.
+    //param.setName( "filter" );
+    //param.setValue( "org.opensource.ExistingFilter" );
+    //provider.addParam( param );
+    param = new Param();
+    param.setName( "test-param-name" );
+    param.setValue( "test-param-value" );
+    provider.addParam( param );
+    topology.addProvider( provider );
+
+    Enumeration<Appender> appenders = NoOpAppender.setUp();
+    try {
+      DeploymentFactory.createDeployment( config, topology );
+      fail( "Should have thrown IllegalArgumentException" );
+    } catch ( DeploymentException e ) {
+      // Expected.
+    } finally {
+      NoOpAppender.tearDown( appenders );
+    }
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testSimpleTopology() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
+    LOG_ENTER();
+    GatewayConfig config = new GatewayTestConfig();
+    //Testing without x-forwarded headers filter
+    ((GatewayTestConfig)config).setXForwardedEnabled(false);
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+    ((GatewayTestConfig) config).setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+    addStacksDir(config, targetDir);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      DeploymentFactory.setGatewayServices(srvcs);
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    Topology topology = new Topology();
+    topology.setName( "test-cluster" );
+    Service service = new Service();
+    service.setRole( "WEBHDFS" );
+    service.addUrl( "http://localhost:50070/webhdfs" );
+    topology.addService( service );
+    Provider provider = new Provider();
+    provider.setRole( "authentication" );
+    provider.setName( "ShiroProvider" );
+    provider.setEnabled( true );
+    Param param = new Param();
+    param.setName( "contextConfigLocation" );
+    param.setValue( "classpath:app-context-security.xml" );
+    provider.addParam( param );
+    topology.addProvider( provider );
+    Provider asserter = new Provider();
+    asserter.setRole( "identity-assertion" );
+    asserter.setName("Default");
+    asserter.setEnabled( true );
+    topology.addProvider( asserter );
+    Provider authorizer = new Provider();
+    authorizer.setRole( "authorization" );
+    authorizer.setName("AclsAuthz");
+    authorizer.setEnabled( true );
+    topology.addProvider( authorizer );
+
+    WebArchive war = DeploymentFactory.createDeployment( config, topology );
+//    File dir = new File( System.getProperty( "user.dir" ) );
+//    File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
+
+    Document web = parse( war.get( "WEB-INF/web.xml" ).getAsset().openStream() );
+    assertThat( web, hasXPath( "/web-app/servlet/servlet-name", equalTo( "test-cluster" ) ) );
+    assertThat( web, hasXPath( "/web-app/servlet/servlet-class", equalTo( "org.apache.hadoop.gateway.GatewayServlet" ) ) );
+    assertThat( web, hasXPath( "/web-app/servlet/init-param/param-name", equalTo( "gatewayDescriptorLocation" ) ) );
+    assertThat( web, hasXPath( "/web-app/servlet/init-param/param-value", equalTo( "gateway.xml" ) ) );
+    assertThat( web, hasXPath( "/web-app/servlet-mapping/servlet-name", equalTo( "test-cluster" ) ) );
+    assertThat( web, hasXPath( "/web-app/servlet-mapping/url-pattern", equalTo( "/*" ) ) );
+
+    Document gateway = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/pattern", equalTo( "/webhdfs/v1/?**" ) ) );
+    //assertThat( gateway, hasXPath( "/gateway/resource[1]/target", equalTo( "http://localhost:50070/webhdfs/v1/?{**}" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.ResponseCookieFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[2]/class", equalTo( "org.apache.shiro.web.servlet.ShiroFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[3]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[3]/class", equalTo( "org.apache.hadoop.gateway.filter.ShiroSubjectIdentityAdapter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[4]/role", equalTo( "rewrite" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[4]/class", equalTo( "org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[5]/role", equalTo( "identity-assertion" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[5]/class", equalTo( "org.apache.hadoop.gateway.identityasserter.filter.IdentityAsserterFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/role", equalTo( "authorization" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/name", equalTo( "AclsAuthz" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/role", equalTo( "dispatch" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/name", equalTo( "webhdfs" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/pattern", equalTo( "/webhdfs/v1/**?**" ) ) );
+    //assertThat( gateway, hasXPath( "/gateway/resource[2]/target", equalTo( "http://localhost:50070/webhdfs/v1/{path=**}?{**}" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[1]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[1]/class", equalTo( "org.apache.hadoop.gateway.filter.ResponseCookieFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[2]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[2]/class", equalTo( "org.apache.shiro.web.servlet.ShiroFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[3]/role", equalTo( "authentication" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[3]/class", equalTo( "org.apache.hadoop.gateway.filter.ShiroSubjectIdentityAdapter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[4]/role", equalTo( "rewrite" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[4]/class", equalTo( "org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[5]/role", equalTo( "identity-assertion" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[5]/class", equalTo( "org.apache.hadoop.gateway.identityasserter.filter.IdentityAsserterFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/role", equalTo( "authorization" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/name", equalTo( "AclsAuthz" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[1]/filter[6]/class", equalTo( "org.apache.hadoop.gateway.filter.AclsAuthorizationFilter" ) ) );
+
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/role", equalTo( "dispatch" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/name", equalTo( "webhdfs" ) ) );
+    assertThat( gateway, hasXPath( "/gateway/resource[2]/filter[7]/class", equalTo( "org.apache.hadoop.gateway.dispatch.GatewayDispatchFilter" ) ) );
+
+    LOG_EXIT();
+  }
+
+
+   @Test( timeout = LONG_TIMEOUT )
+   public void testWebXmlGeneration() throws IOException, SAXException, ParserConfigurationException, URISyntaxException {
+      LOG_ENTER();
+      GatewayConfig config = new GatewayTestConfig();
+      File targetDir = new File(System.getProperty("user.dir"), "target");
+      File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
+      gatewayDir.mkdirs();
+      ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
+      File deployDir = new File(config.getGatewayDeploymentDir());
+      deployDir.mkdirs();
+
+      DefaultGatewayServices srvcs = new DefaultGatewayServices();
+      Map<String, String> options = new HashMap<String, String>();
+      options.put("persist-master", "false");
+      options.put("master", "password");
+      try {
+         DeploymentFactory.setGatewayServices(srvcs);
+         srvcs.init(config, options);
+      } catch (ServiceLifecycleException e) {
+         e.printStackTrace(); // I18N not required.
+      }
+
+      Topology topology = new Topology();
+      topology.setName("test-cluster");
+      Service service = new Service();
+      service.setRole("WEBHDFS");
+      service.addUrl("http://localhost:50070/webhdfs");
+      topology.addService(service);
+      Provider provider = new Provider();
+      provider.setRole("authentication");
+      provider.setName("ShiroProvider");
+      provider.setEnabled(true);
+      Param param = new Param();
+      param.setName("contextConfigLocation");
+      param.setValue("classpath:app-context-security.xml");
+      provider.addParam(param);
+      topology.addProvider(provider);
+      Provider asserter = new Provider();
+      asserter.setRole("identity-assertion");
+      asserter.setName("Default");
+      asserter.setEnabled(true);
+      topology.addProvider(asserter);
+      Provider authorizer = new Provider();
+      authorizer.setRole("authorization");
+      authorizer.setName("AclsAuthz");
+      authorizer.setEnabled(true);
+      topology.addProvider(authorizer);
+      Provider ha = new Provider();
+      ha.setRole("ha");
+      ha.setName("HaProvider");
+      ha.setEnabled(true);
+      topology.addProvider(ha);
+
+      for (int i = 0; i < 100; i++) {
+         createAndTestDeployment(config, topology);
+      }
+      LOG_EXIT();
+   }
+
+   private void createAndTestDeployment(GatewayConfig config, Topology topology) throws IOException, SAXException, ParserConfigurationException {
+
+      WebArchive war = DeploymentFactory.createDeployment(config, topology);
+//      File dir = new File( System.getProperty( "user.dir" ) );
+//      File file = war.as( ExplodedExporter.class ).exportExploded( dir, "test-cluster.war" );
+
+      Document web = parse(war.get("WEB-INF/web.xml").getAsset().openStream());
+      assertThat(web, hasXPath("/web-app/servlet/servlet-class", equalTo("org.apache.hadoop.gateway.GatewayServlet")));
+      assertThat(web, hasXPath("/web-app/servlet/init-param/param-name", equalTo("gatewayDescriptorLocation")));
+      assertThat(web, hasXPath("/web-app/servlet/init-param/param-value", equalTo("gateway.xml")));
+      assertThat(web, hasXPath("/web-app/servlet-mapping/servlet-name", equalTo("test-cluster")));
+      assertThat(web, hasXPath("/web-app/servlet-mapping/url-pattern", equalTo("/*")));
+      //testing the order of listener classes generated
+      assertThat(web, hasXPath("/web-app/listener[2]/listener-class", equalTo("org.apache.hadoop.gateway.services.GatewayServicesContextListener")));
+      assertThat(web, hasXPath("/web-app/listener[3]/listener-class", equalTo("org.apache.hadoop.gateway.ha.provider.HaServletContextListener")));
+      assertThat(web, hasXPath("/web-app/listener[4]/listener-class", equalTo("org.apache.hadoop.gateway.filter.rewrite.api.UrlRewriteServletContextListener")));
+   }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testDeploymentWithServiceParams() throws Exception {
+    LOG_ENTER();
+    GatewayConfig config = new GatewayTestConfig();
+    File targetDir = new File(System.getProperty("user.dir"), "target");
+    File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
+    gatewayDir.mkdirs();
+    ((GatewayTestConfig) config).setGatewayHomeDir(gatewayDir.getAbsolutePath());
+    File deployDir = new File(config.getGatewayDeploymentDir());
+    deployDir.mkdirs();
+    addStacksDir(config, targetDir);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String, String> options = new HashMap<String, String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      DeploymentFactory.setGatewayServices(srvcs);
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    Service service;
+    Param param;
+    Topology topology = new Topology();
+    topology.setName( "test-cluster" );
+
+    service = new Service();
+    service.setRole( "HIVE" );
+    service.setUrls( Arrays.asList( new String[]{ "http://hive-host:50001/" } ) );
+    param = new Param();
+    param.setName( "someparam" );
+    param.setValue( "somevalue" );
+    service.addParam( param );
+    topology.addService( service );
+
+    service = new Service();
+    service.setRole( "WEBHBASE" );
+    service.setUrls( Arrays.asList( new String[]{ "http://hbase-host:50002/" } ) );
+    param = new Param();
+    param.setName( "replayBufferSize" );
+    param.setValue( "33" );
+    service.addParam( param );
+    topology.addService( service );
+
+    service = new Service();
+    service.setRole( "OOZIE" );
+    service.setUrls( Arrays.asList( new String[]{ "http://hbase-host:50003/" } ) );
+    param = new Param();
+    param.setName( "otherparam" );
+    param.setValue( "65" );
+    service.addParam( param );
+    topology.addService( service );
+
+    WebArchive war = DeploymentFactory.createDeployment( config, topology );
+    Document doc = parse( war.get( "WEB-INF/gateway.xml" ).getAsset().openStream() );
+//    dump( doc );
+
+    Node resourceNode, filterNode, paramNode;
+    String value;
+
+    resourceNode = node( doc, "gateway/resource[role/text()='HIVE']" );
+    assertThat( resourceNode, is(not(nullValue())));
+    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
+    assertThat( filterNode, is(not(nullValue())));
+    paramNode = node( filterNode, "param[name/text()='someparam']" );
+    value = value( paramNode, "value/text()" );
+    assertThat( value, is( "somevalue" ) ) ;
+
+    resourceNode = node( doc, "gateway/resource[role/text()='WEBHBASE']" );
+    assertThat( resourceNode, is(not(nullValue())));
+    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
+    assertThat( filterNode, is(not(nullValue())));
+    paramNode = node( filterNode, "param[name/text()='replayBufferSize']" );
+    value = value( paramNode, "value/text()" );
+    assertThat( value, is( "33" ) ) ;
+
+    resourceNode = node( doc, "gateway/resource[role/text()='OOZIE']" );
+    assertThat( resourceNode, is(not(nullValue())));
+    filterNode = node( resourceNode, "filter[role/text()='dispatch']" );
+    assertThat( filterNode, is(not(nullValue())));
+    paramNode = node( filterNode, "param[name/text()='otherparam']" );
+    value = value( paramNode, "value/text()" );
+    assertThat( value, is( "65" ) ) ;
+
+    FileUtils.deleteQuietly( deployDir );
+
+    LOG_EXIT();
+  }
+
+  private Document parse( InputStream stream ) throws IOException, SAXException, ParserConfigurationException {
+    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
+    DocumentBuilder builder = factory.newDocumentBuilder();
+    InputSource source = new InputSource( stream );
+    return builder.parse( source );
+  }
+
+  private void addStacksDir(GatewayConfig config, File targetDir) {
+    File stacksDir = new File( config.getGatewayServicesDir() );
+    stacksDir.mkdirs();
+    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
+    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
+    File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
+    if (!stacksSourceDir.exists()) {
+      stacksSourceDir = new File( targetDir.getParentFile().getParent(), pathToStacksSource);
+    }
+    if (stacksSourceDir.exists()) {
+      try {
+        FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
+      } catch ( IOException e) {
+        fail(e.getMessage());
+      }
+    }
+
+  }
+
+  private void dump( Document document ) throws TransformerException {
+    Transformer transformer = TransformerFactory.newInstance().newTransformer();
+    transformer.setOutputProperty( OutputKeys.INDENT, "yes" );
+    StreamResult result = new StreamResult( new StringWriter() );
+    DOMSource source = new DOMSource( document );
+    transformer.transform( source, result );
+    String xmlString = result.getWriter().toString();
+    System.out.println( xmlString );
+  }
+
+  private Node node( Node scope, String expression ) throws XPathExpressionException {
+    return (Node)XPathFactory.newInstance().newXPath().compile( expression ).evaluate( scope, XPathConstants.NODE );
+  }
+
+  private String value( Node scope, String expression ) throws XPathExpressionException {
+    return XPathFactory.newInstance().newXPath().compile( expression ).evaluate( scope );
+  }
 
 }
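
The re-enabled methods in DeploymentFactoryFuncTest above all follow the same pattern: create a throw-away gateway home under target/, initialize DefaultGatewayServices with a non-persisted master secret, assemble a Topology in code, and then parse and assert the descriptors inside the WebArchive returned by DeploymentFactory.createDeployment. A condensed sketch of that pattern follows; the import locations and the standalone main() wrapper are assumptions for illustration (the real code runs as JUnit test methods with the helpers shown in the diff), so treat it as a reading aid rather than a drop-in program.

// Condensed sketch of the deployment-test pattern above. Import paths and the
// main() wrapper are assumptions; the actual tests are JUnit methods.
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.apache.hadoop.gateway.GatewayTestConfig;            // test-only config, lives in gateway-test
import org.apache.hadoop.gateway.deploy.DeploymentFactory;
import org.apache.hadoop.gateway.services.DefaultGatewayServices;
import org.apache.hadoop.gateway.topology.Service;
import org.apache.hadoop.gateway.topology.Topology;
import org.jboss.shrinkwrap.api.spec.WebArchive;

public class DeploymentSketch {
  public static void main( String[] args ) throws Exception {
    // Throw-away gateway home under target/ so repeated runs do not collide.
    GatewayTestConfig config = new GatewayTestConfig();
    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
    gatewayDir.mkdirs();
    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
    new File( config.getGatewayDeploymentDir() ).mkdirs();
    // The real tests also copy the service definitions into the gateway
    // services dir via addStacksDir(config, targetDir); that step is omitted here.

    // Gateway services with an in-memory, non-persisted master secret.
    DefaultGatewayServices services = new DefaultGatewayServices();
    Map<String,String> options = new HashMap<String,String>();
    options.put( "persist-master", "false" );
    options.put( "master", "password" );
    DeploymentFactory.setGatewayServices( services );
    services.init( config, options );

    // Topology assembled in code rather than read from a descriptor file.
    Topology topology = new Topology();
    topology.setName( "test-cluster" );
    Service service = new Service();
    service.setRole( "WEBHDFS" );
    service.addUrl( "http://localhost:50070/webhdfs" );
    topology.addService( service );

    // The tests parse WEB-INF/web.xml and WEB-INF/gateway.xml out of this
    // archive and assert the generated filter chain with XPath matchers.
    WebArchive war = DeploymentFactory.createDeployment( config, topology );
    System.out.println( war.toString( true ) );
  }
}

Running something like this requires the Knox gateway-server, gateway-test (for GatewayTestConfig) and ShrinkWrap artifacts on the classpath, which is why these checks live as functional tests in the gateway-test module rather than as a standalone program.
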


[4/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Re-enabled remainder of func tests.

Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
index 79ba84b..c944796 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
@@ -62,293 +62,293 @@ import static org.junit.Assert.assertThat;
 
 public class GatewayDeployFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long LONG_TIMEOUT = 30 * 1000L;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayDeployFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayDeployFuncTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static File gatewayHome;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    //appenders = NoOpAppender.setUp();
-//    setupLdap();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  @Before
-//  public void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//    gatewayHome = gatewayDir;
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/test-cluster";
-//  }
-//
-//  @After
-//  public void cleanupGateway() throws Exception {
-//    gateway.stop();
-//    FileUtils.deleteQuietly( gatewayHome );
-//  }
-//
-//  private static XMLTag createTopology() {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .addTag( "provider" )
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  //@Test
-//  public void waitForManualTesting() throws IOException {
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testDeployRedeployUndeploy() throws InterruptedException, IOException {
-//    LOG_ENTER();
-//    long sleep = 200;
-//    int numFilesInWar = 5;
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    long topoTimestampBefore, topoTimestampAfter;
-//
-//    File topoDir = new File( config.getGatewayTopologyDir() );
-//    File deployDir = new File( config.getGatewayDeploymentDir() );
-//    File warDir;
-//
-//    // Make sure deployment directory is empty.
-//    assertThat( topoDir.listFiles().length, is( 0 ) );
-//    assertThat( deployDir.listFiles().length, is( 0 ) );
-//
-//    File descriptor = writeTestTopology( "test-cluster", createTopology() );
-//    long writeTime = System.currentTimeMillis();
-//
-//    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 1, 0, sleep );
-//    for( File webInfDir : warDir.listFiles() ) {
-//      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
-//    }
-//    waitForAccess( serviceUrl, username, password, sleep );
-//
-//    // Wait to make sure a second has passed to ensure the file timestamps are different.
-//    waitForElapsed( writeTime, 1000, 100 );
-//
-//    // Redeploy and make sure the timestamp is updated.
-//    topoTimestampBefore = descriptor.lastModified();
-//    GatewayServer.redeployTopologies( null );
-//    writeTime = System.currentTimeMillis();
-//    topoTimestampAfter = descriptor.lastModified();
-//    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
-//
-//    // Check to make sure there are two war directories with the same root.
-//    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 2, 1, sleep );
-//    for( File webInfDir : warDir.listFiles() ) {
-//      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
-//    }
-//    waitForAccess( serviceUrl, username, password, sleep );
-//
-//    // Wait to make sure a second has passed to ensure the file timestamps are different.
-//    waitForElapsed( writeTime, 1000, 100 );
-//
-//    // Redeploy and make sure the timestamp is updated.
-//    topoTimestampBefore = descriptor.lastModified();
-//    GatewayServer.redeployTopologies( "test-cluster" );
-//    writeTime = System.currentTimeMillis();
-//    topoTimestampAfter = descriptor.lastModified();
-//    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
-//
-//    // Check to make sure there are two war directories with the same root.
-//    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 3, 2, sleep );
-//    for( File webInfDir : warDir.listFiles() ) {
-//      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
-//    }
-//    waitForAccess( serviceUrl, username, password, sleep );
-//
-//    // Delete the test topology.
-//    assertThat( "Failed to delete the topology file.", descriptor.delete(), is( true ) );
-//
-//    // Wait to make sure a second has passed to ensure the the file timestamps are different.
-//    waitForElapsed( writeTime, 1000, 100 );
-//
-//    waitForFiles( deployDir, ".*", 0, -1, sleep );
-//
-//    // Wait a bit more to make sure undeployment finished.
-//    Thread.sleep( sleep );
-//
-//    // Make sure the test topology is not accessible.
-//    given().auth().preemptive().basic( username, password )
-//        .expect().statusCode( HttpStatus.SC_NOT_FOUND )
-//        .when().get( serviceUrl );
-//
-//    // Make sure deployment directory is empty.
-//    assertThat( topoDir.listFiles().length, is( 0 ) );
-//    assertThat( deployDir.listFiles().length, is( 0 ) );
-//    LOG_EXIT();
-//  }
-//
-//  private void waitForElapsed( long from, long total, long sleep ) throws InterruptedException {
-//    while( System.currentTimeMillis() - from < total ) {
-//      Thread.sleep( sleep );
-//    }
-//  }
-//
-//  private File writeTestTopology( String name, XMLTag xml ) throws IOException {
-//    // Create the test topology.
-//    File tempFile = new File( config.getGatewayTopologyDir(), name + ".xml." + UUID.randomUUID() );
-//    FileOutputStream stream = new FileOutputStream( tempFile );
-//    xml.toStream( stream );
-//    stream.close();
-//    File descriptor = new File( config.getGatewayTopologyDir(), name + ".xml" );
-//    tempFile.renameTo( descriptor );
-//    return descriptor;
-//  }
-//
-//  private File waitForFiles( File dir, String pattern, int count, int index, long sleep ) throws InterruptedException {
-//    RegexDirFilter filter = new RegexDirFilter( pattern );
-//    while( true ) {
-//      File[] files = dir.listFiles( filter );
-//      if( files.length == count ) {
-//        return ( index < 0 ) ? null : files[ index ];
-//      }
-//      Thread.sleep( sleep );
-//    }
-//  }
-//
-//  private void waitForAccess( String url, String username, String password, long sleep ) throws InterruptedException {
-//    while( true ) {
-//      Response response = given()
-//          .auth().preemptive().basic( username, password )
-//          .when().get( url ).andReturn();
-//      if( response.getStatusCode() == HttpStatus.SC_NOT_FOUND ) {
-//        Thread.sleep( sleep );
-//        continue;
-//      }
-//      assertThat( response.getContentType(), containsString( "text/plain" ) );
-//      assertThat( response.getBody().asString(), is( "test-service-response" ) );
-//      break;
-//    }
-//  }
-//
-//  private class RegexDirFilter implements FilenameFilter {
-//
-//    Pattern pattern;
-//
-//    RegexDirFilter( String regex ) {
-//      pattern = Pattern.compile( regex );
-//    }
-//
-//    @Override
-//    public boolean accept( File dir, String name ) {
-//      return pattern.matcher( name ).matches();
-//    }
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long LONG_TIMEOUT = 30 * 1000L;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayDeployFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayDeployFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static File gatewayHome;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    //appenders = NoOpAppender.setUp();
+    setupLdap();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  @Before
+  public void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+    gatewayHome = gatewayDir;
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/test-cluster";
+  }
+
+  @After
+  public void cleanupGateway() throws Exception {
+    gateway.stop();
+    FileUtils.deleteQuietly( gatewayHome );
+  }
+
+  private static XMLTag createTopology() {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .addTag( "provider" )
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  //@Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testDeployRedeployUndeploy() throws InterruptedException, IOException {
+    LOG_ENTER();
+    long sleep = 200;
+    int numFilesInWar = 5;
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    long topoTimestampBefore, topoTimestampAfter;
+
+    File topoDir = new File( config.getGatewayTopologyDir() );
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    File warDir;
+
+    // Make sure deployment directory is empty.
+    assertThat( topoDir.listFiles().length, is( 0 ) );
+    assertThat( deployDir.listFiles().length, is( 0 ) );
+
+    File descriptor = writeTestTopology( "test-cluster", createTopology() );
+    long writeTime = System.currentTimeMillis();
+
+    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 1, 0, sleep );
+    for( File webInfDir : warDir.listFiles() ) {
+      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
+    }
+    waitForAccess( serviceUrl, username, password, sleep );
+
+    // Wait to make sure a second has passed to ensure the file timestamps are different.
+    waitForElapsed( writeTime, 1000, 100 );
+
+    // Redeploy and make sure the timestamp is updated.
+    topoTimestampBefore = descriptor.lastModified();
+    GatewayServer.redeployTopologies( null );
+    writeTime = System.currentTimeMillis();
+    topoTimestampAfter = descriptor.lastModified();
+    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
+
+    // Check to make sure there are two war directories with the same root.
+    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 2, 1, sleep );
+    for( File webInfDir : warDir.listFiles() ) {
+      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
+    }
+    waitForAccess( serviceUrl, username, password, sleep );
+
+    // Wait to make sure a second has passed to ensure the file timestamps are different.
+    waitForElapsed( writeTime, 1000, 100 );
+
+    // Redeploy and make sure the timestamp is updated.
+    topoTimestampBefore = descriptor.lastModified();
+    GatewayServer.redeployTopologies( "test-cluster" );
+    writeTime = System.currentTimeMillis();
+    topoTimestampAfter = descriptor.lastModified();
+    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
+
+    // Check to make sure there are three war directories with the same root.
+    warDir = waitForFiles( deployDir, "test-cluster.war\\.[0-9A-Fa-f]+", 3, 2, sleep );
+    for( File webInfDir : warDir.listFiles() ) {
+      waitForFiles( webInfDir, ".*", numFilesInWar, 0, sleep );
+    }
+    waitForAccess( serviceUrl, username, password, sleep );
+
+    // Delete the test topology.
+    assertThat( "Failed to delete the topology file.", descriptor.delete(), is( true ) );
+
+    // Wait to make sure a second has passed to ensure the file timestamps are different.
+    waitForElapsed( writeTime, 1000, 100 );
+
+    waitForFiles( deployDir, ".*", 0, -1, sleep );
+
+    // Wait a bit more to make sure undeployment finished.
+    Thread.sleep( sleep );
+
+    // Make sure the test topology is not accessible.
+    given().auth().preemptive().basic( username, password )
+        .expect().statusCode( HttpStatus.SC_NOT_FOUND )
+        .when().get( serviceUrl );
+
+    // Make sure deployment directory is empty.
+    assertThat( topoDir.listFiles().length, is( 0 ) );
+    assertThat( deployDir.listFiles().length, is( 0 ) );
+    LOG_EXIT();
+  }
+
+  private void waitForElapsed( long from, long total, long sleep ) throws InterruptedException {
+    while( System.currentTimeMillis() - from < total ) {
+      Thread.sleep( sleep );
+    }
+  }
+
+  private File writeTestTopology( String name, XMLTag xml ) throws IOException {
+    // Create the test topology.
+    File tempFile = new File( config.getGatewayTopologyDir(), name + ".xml." + UUID.randomUUID() );
+    FileOutputStream stream = new FileOutputStream( tempFile );
+    xml.toStream( stream );
+    stream.close();
+    File descriptor = new File( config.getGatewayTopologyDir(), name + ".xml" );
+    tempFile.renameTo( descriptor );
+    return descriptor;
+  }
+
+  private File waitForFiles( File dir, String pattern, int count, int index, long sleep ) throws InterruptedException {
+    RegexDirFilter filter = new RegexDirFilter( pattern );
+    while( true ) {
+      File[] files = dir.listFiles( filter );
+      if( files.length == count ) {
+        return ( index < 0 ) ? null : files[ index ];
+      }
+      Thread.sleep( sleep );
+    }
+  }
+
+  private void waitForAccess( String url, String username, String password, long sleep ) throws InterruptedException {
+    while( true ) {
+      Response response = given()
+          .auth().preemptive().basic( username, password )
+          .when().get( url ).andReturn();
+      if( response.getStatusCode() == HttpStatus.SC_NOT_FOUND ) {
+        Thread.sleep( sleep );
+        continue;
+      }
+      assertThat( response.getContentType(), containsString( "text/plain" ) );
+      assertThat( response.getBody().asString(), is( "test-service-response" ) );
+      break;
+    }
+  }
+
+  private class RegexDirFilter implements FilenameFilter {
+
+    Pattern pattern;
+
+    RegexDirFilter( String regex ) {
+      pattern = Pattern.compile( regex );
+    }
+
+    @Override
+    public boolean accept( File dir, String name ) {
+      return pattern.matcher( name ).matches();
+    }
+  }
 
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
index 5eafc0d..bcede37 100755
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
@@ -67,272 +67,272 @@ import com.mycila.xmltool.XMLTag;
  */
 public class GatewayLdapDynamicGroupFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 2000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayLdapDynamicGroupFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapDynamicGroupFuncTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    //appenders = NoOpAppender.setUp();
-//    int port = setupLdap();
-//    setupGateway(port);
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    gateway.stop();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static int setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//    return port;
-//  }
-//
-//  public static void setupGateway(int ldapPort) throws IOException, Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "testdg-cluster.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    /*
-//    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
-//    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
-//    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//    System.setOut(new PrintStream(outContent));
-//    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "testdg-cluster"};
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(new GatewayConfigImpl());
-//    cli.run(argvals);
-//
-//    outContent.reset();
-//    String[] args1 = {"list-alias", "--cluster", "testdg-cluster", "--master", "hadoop"};
-//    cli = new KnoxCLI();
-//    cli.run(args1);
-//    System.err.println("ALIAS LIST: " + outContent.toString());
-//
-//    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
-//    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
-//    System.err.println("ALIAS value1: " + new String(passwordChars1));
-//    */
-//
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/testdg-cluster";
-//
-//    ///*
-//    GatewayServices services = GatewayServer.getGatewayServices();
-//    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
-//    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
-//
-//    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
-//    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
-//
-//    descriptor = new File( topoDir, "testdg-cluster.xml" );
-//    stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    try {
-//      Thread.sleep(5000);
-//    } catch (Exception e) {
-//
-//    }
-//    //*/
-//  }
-//
-//  private static XMLTag createTopology(int ldapPort) {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
-//        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
-//        .addTag( "value" ).addText( "true" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
-//        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
-//        .addTag( "value" ).addText( "groupofurls" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
-//        .addTag( "value" ).addText( "memberurl" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
-//        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
-//        .addTag( "value" ).addText( "testdg-cluster" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
-//        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
-//        // .addTag( "value" ).addText( "guest-password" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "authorization" )
-//        .addTag( "name" ).addText( "AclsAuthz" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
-//        .addTag( "value" ).addText( "*;directors;*" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//         // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  // @Test
-//  public void waitForManualTesting() throws IOException {
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testGroupMember() throws ClassNotFoundException, Exception {
-//    LOG_ENTER();
-//    String username = "bob";
-//    String password = "bob-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_OK )
-//        .contentType( "text/plain" )
-//        .body( is( "test-service-response" ) )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testNonGroupMember() throws ClassNotFoundException {
-//    LOG_ENTER();
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_FORBIDDEN )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 2000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayLdapDynamicGroupFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapDynamicGroupFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    //appenders = NoOpAppender.setUp();
+    int port = setupLdap();
+    setupGateway(port);
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static int setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+    return port;
+  }
+
+  public static void setupGateway(int ldapPort) throws IOException, Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "testdg-cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    /*
+    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
+    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
+    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(outContent));
+    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "testdg-cluster"};
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(new GatewayConfigImpl());
+    cli.run(argvals);
+
+    outContent.reset();
+    String[] args1 = {"list-alias", "--cluster", "testdg-cluster", "--master", "hadoop"};
+    cli = new KnoxCLI();
+    cli.run(args1);
+    System.err.println("ALIAS LIST: " + outContent.toString());
+
+    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
+    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
+    System.err.println("ALIAS value1: " + new String(passwordChars1));
+    */
+
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/testdg-cluster";
+
+    ///*
+    GatewayServices services = GatewayServer.getGatewayServices();
+    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
+    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
+
+    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
+    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
+
+    descriptor = new File( topoDir, "testdg-cluster.xml" );
+    stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    try {
+      Thread.sleep(5000);
+    } catch (Exception e) {
+
+    }
+    //*/
+  }
+
+  private static XMLTag createTopology(int ldapPort) {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
+        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
+        .addTag( "value" ).addText( "true" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
+        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
+        .addTag( "value" ).addText( "groupofurls" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
+        .addTag( "value" ).addText( "memberurl" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
+        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
+        .addTag( "value" ).addText( "testdg-cluster" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
+        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
+        // .addTag( "value" ).addText( "guest-password" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "name" ).addText( "AclsAuthz" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
+        .addTag( "value" ).addText( "*;directors;*" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+         // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  // @Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testGroupMember() throws ClassNotFoundException, Exception {
+    LOG_ENTER();
+    String username = "bob";
+    String password = "bob-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_OK )
+        .contentType( "text/plain" )
+        .body( is( "test-service-response" ) )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testNonGroupMember() throws ClassNotFoundException {
+    LOG_ENTER();
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_FORBIDDEN )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
   
 }

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
index 6c60931..8719473 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
@@ -67,273 +67,273 @@ import com.mycila.xmltool.XMLTag;
  */
 public class GatewayLdapGroupFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 2000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * 1000L;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayLdapGroupFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapGroupFuncTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    //appenders = NoOpAppender.setUp();
-//    int port = setupLdap();
-//    setupGateway(port);
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    gateway.stop();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static int setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//    return port;
-//  }
-//
-//  public static void setupGateway(int ldapPort) throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "test-cluster.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "true" );
-//    options.put( "master", "hadoop" );
-//
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    /*
-//    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
-//    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
-//    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//    System.setOut(new PrintStream(outContent));
-//    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "test-cluster"};
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(new GatewayConfigImpl());
-//    cli.run(argvals);
-//
-//    outContent.reset();
-//    String[] args1 = {"list-alias", "--cluster", "test-cluster", "--master", "hadoop"};
-//    cli = new KnoxCLI();
-//    cli.run(args1);
-//    System.err.println("ALIAS LIST: " + outContent.toString());
-//
-//    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
-//    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
-//    System.err.println("ALIAS value1: " + new String(passwordChars1));
-//    */
-//
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/test-cluster";
-//
-//    ///*
-//    GatewayServices services = GatewayServer.getGatewayServices();
-//    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
-//    aliasService.addAliasForCluster("test-cluster", "ldcSystemPassword", "guest-password");
-//
-//    char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
-//    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
-//
-//    descriptor = new File( topoDir, "test-cluster.xml" );
-//    stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    try {
-//      Thread.sleep(5000);
-//    } catch (Exception e) {
-//
-//    }
-//    //*/
-//  }
-//
-//  private static XMLTag createTopology(int ldapPort) {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
-//        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
-//        .addTag( "value" ).addText( "true" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
-//        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
-//        .addTag( "value" ).addText( "groupofnames" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
-//        .addTag( "value" ).addText( "member" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
-//        .addTag( "value" ).addText( "test-cluster" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
-//        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
-//        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "authorization" )
-//        .addTag( "name" ).addText( "AclsAuthz" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
-//        .addTag( "value" ).addText( "*;analyst;*" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//         // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  @Ignore
-//  // @Test
-//  public void waitForManualTesting() throws IOException {
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testGroupMember() throws ClassNotFoundException, Exception {
-//    LOG_ENTER();
-//    String username = "sam";
-//    String password = "sam-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_OK )
-//        .contentType( "text/plain" )
-//        .body( is( "test-service-response" ) )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testNonGroupMember() throws ClassNotFoundException {
-//    LOG_ENTER();
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_FORBIDDEN )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 2000L;
+  private static final long MEDIUM_TIMEOUT = 5 * 1000L;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayLdapGroupFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapGroupFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    //appenders = NoOpAppender.setUp();
+    int port = setupLdap();
+    setupGateway(port);
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static int setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+    return port;
+  }
+
+  public static void setupGateway(int ldapPort) throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "test-cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "true" );
+    options.put( "master", "hadoop" );
+
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    /*
+    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
+    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
+    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+    System.setOut(new PrintStream(outContent));
+    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "test-cluster"};
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(new GatewayConfigImpl());
+    cli.run(argvals);
+
+    outContent.reset();
+    String[] args1 = {"list-alias", "--cluster", "test-cluster", "--master", "hadoop"};
+    cli = new KnoxCLI();
+    cli.run(args1);
+    System.err.println("ALIAS LIST: " + outContent.toString());
+
+    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
+    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
+    System.err.println("ALIAS value1: " + new String(passwordChars1));
+    */
+
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/test-cluster";
+
+    ///*
+    GatewayServices services = GatewayServer.getGatewayServices();
+    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
+    aliasService.addAliasForCluster("test-cluster", "ldcSystemPassword", "guest-password");
+
+    char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
+    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
+
+    descriptor = new File( topoDir, "test-cluster.xml" );
+    stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    try {
+      Thread.sleep(5000);
+    } catch (Exception e) {
+
+    }
+    //*/
+  }
+
+  private static XMLTag createTopology(int ldapPort) {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
+        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
+        .addTag( "value" ).addText( "true" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
+        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
+        .addTag( "value" ).addText( "groupofnames" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
+        .addTag( "value" ).addText( "member" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
+        .addTag( "value" ).addText( "test-cluster" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
+        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
+        .addTag( "value" ).addText( "${ALIAS=ldcSystemPassword}" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "name" ).addText( "AclsAuthz" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
+        .addTag( "value" ).addText( "*;analyst;*" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+         // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  @Ignore
+  // @Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testGroupMember() throws Exception {
+    LOG_ENTER();
+    String username = "sam";
+    String password = "sam-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_OK )
+        .contentType( "text/plain" )
+        .body( is( "test-service-response" ) )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testNonGroupMember() throws ClassNotFoundException {
+    LOG_ENTER();
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_FORBIDDEN )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
 
 }
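Note on the re-enabled setup above: it registers the LDAP system password as a cluster alias, rewrites test-cluster.xml, and then sleeps for a fixed five seconds so the gateway has time to redeploy the topology. A fixed sleep is a common source of slow or flaky functional tests. The sketch below shows one bounded-polling alternative; it is not part of the commit, the class and method names are invented for illustration, and it assumes (as the setup code suggests) that a redeployment shows up as a new entry under the gateway deployment directory.

    import java.io.File;
    import java.util.concurrent.TimeUnit;

    // Illustrative only -- not part of this commit.
    final class DeploymentWait {
      // Poll the deployment directory until it has more entries than it did before
      // the topology descriptor was rewritten, or until the timeout expires.
      static boolean waitForNewDeployment(File deployDir, int entriesBefore, long timeoutMs)
          throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (System.currentTimeMillis() < deadline) {
          String[] entries = deployDir.list();
          if (entries != null && entries.length > entriesBefore) {
            return true;
          }
          TimeUnit.MILLISECONDS.sleep(250);
        }
        return false;
      }
    }

A caller would capture deployDir.list().length before writing the new descriptor and assert that waitForNewDeployment(...) returns true, instead of sleeping unconditionally.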

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
index 4475732..aa93422 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
@@ -56,170 +56,170 @@ import static org.junit.Assert.assertThat;
 
 public class GatewayLocalServiceFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 2000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayLocalServiceFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayFuncTestDriver.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    appenders = NoOpAppender.setUp();
-//    setupLdap();
-//    setupGateway();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    gateway.stop();
-//    ldap.stop( true );
-//    FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
-//    FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
-//    NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "cluster.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createTopology().toStream( stream );
-//    stream.close();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/cluster";
-//  }
-//
-//  private static XMLTag createTopology() {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .addTag( "provider" )
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-jersey-service-role" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testJerseyService() throws ClassNotFoundException {
-//    LOG_ENTER();
-//    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.glassfish.jersey.servlet.ServletContainer" ), notNullValue() );
-//    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.jersey.JerseyDispatchDeploymentContributor" ), notNullValue() );
-//    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.jersey.JerseyServiceDeploymentContributorBase" ), notNullValue() );
-//    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.TestJerseyService" ), notNullValue() );
-//
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/test-jersey-service/test-jersey-resource-path";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_OK )
-//        .contentType( "text/plain" )
-//        .body( is( "test-jersey-resource-response" ) )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
+  private static final long SHORT_TIMEOUT = 2000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayLocalServiceFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayLocalServiceFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    appenders = NoOpAppender.setUp();
+    setupLdap();
+    setupGateway();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    gateway.stop();
+    ldap.stop( true );
+    FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
+    NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology().toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/cluster";
+  }
+
+  private static XMLTag createTopology() {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .addTag( "provider" )
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-jersey-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testJerseyService() throws ClassNotFoundException {
+    LOG_ENTER();
+    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.glassfish.jersey.servlet.ServletContainer" ), notNullValue() );
+    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.jersey.JerseyDispatchDeploymentContributor" ), notNullValue() );
+    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.jersey.JerseyServiceDeploymentContributorBase" ), notNullValue() );
+    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.apache.hadoop.gateway.TestJerseyService" ), notNullValue() );
+
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-jersey-service/test-jersey-resource-path";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_OK )
+        .contentType( "text/plain" )
+        .body( is( "test-jersey-resource-response" ) )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
 
 }
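Each of the re-enabled test classes carries its own copy of the findFreePort() helper, which binds a ServerSocket to port 0, reads the ephemeral port, and closes the socket before the LDAP server or gateway later binds to that port. Closing and rebinding leaves a small window in which another process can claim the port. Below is a minimal try-with-resources variant of the same helper, plain JDK only, with the class name invented for illustration; it keeps the original contract of returning an int rather than handing the open listener to the server.

    import java.io.IOException;
    import java.net.ServerSocket;

    // Illustrative only -- a compact variant of the findFreePort() helper above.
    final class FreePortFinder {
      static int findFreePort() throws IOException {
        // Binding to port 0 asks the OS for an ephemeral port. The socket is closed
        // automatically when the try block exits, so the race with other processes
        // remains; the try-with-resources form just guarantees the close happens.
        try (ServerSocket socket = new ServerSocket(0)) {
          return socket.getLocalPort();
        }
      }
    }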


[2/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Renabled remainder of func tests.

Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
index 81223ac..3676fdc 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliSysBindTest.java
@@ -52,280 +52,280 @@ import static org.junit.Assert.assertThat;
 
 public class KnoxCliSysBindTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = KnoxCliSysBindTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( KnoxCliSysBindTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayTestConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
-//  private static final String uuid = UUID.randomUUID().toString();
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    System.setOut(new PrintStream(outContent));
-//    System.setErr(new PrintStream(errContent));
-//    setupLdap();
-//    setupGateway();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    ldap.stop( true );
-//
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap( ) throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    writeTopology(topoDir, "test-cluster-1.xml", "guest", "guest-password", true);
-//    writeTopology(topoDir, "test-cluster-2.xml", "sam", "sam-password", true);
-//    writeTopology(topoDir, "test-cluster-3.xml", "admin", "admin-password", true);
-//    writeTopology(topoDir, "test-cluster-4.xml", "", "", false);
-//
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//  }
-//
-//  private static void writeTopology(File topoDir, String name, String user, String pass, boolean goodTopology) throws Exception {
-//    File descriptor = new File(topoDir, name);
-//
-//    if(descriptor.exists()){
-//      descriptor.delete();
-//      descriptor = new File(topoDir, name);
-//    }
-//
-//    FileOutputStream stream = new FileOutputStream( descriptor, false );
-//
-//    if(goodTopology) {
-//      createTopology(user, pass).toStream( stream );
-//    } else {
-//      createBadTopology().toStream( stream );
-//    }
-//
-//    stream.close();
-//
-//  }
-//
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  private static XMLTag createBadTopology(){
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag( "gateway" )
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag( "name").addText( "urls./**")
-//        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service")
-//        .addTag("role").addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static XMLTag createTopology(String username, String password) {
-//
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag("gateway")
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapGroupContextFactory")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.searchBase")
-//        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.groupObjectClass")
-//        .addTag("value").addText("groupOfNames").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapRealm.memberAttribute")
-//        .addTag("value").addText("member").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
-//        .addTag("value").addText("uid=" + username + ",ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
-//        .addTag( "value").addText(password).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name" ).addText("urls./**")
-//        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
-//        .addTag("provider" )
-//        .addTag("role").addText( "identity-assertion" )
-//        .addTag( "enabled").addText( "true" )
-//        .addTag("name").addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testLDAPAuth() throws Exception {
-//    LOG_ENTER();
-//
-////    Test 1: Make sure authentication is successful
-//    outContent.reset();
-//    String args[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-1", "--d" };
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args);
-//    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
-//
-//    //    Test 2: Make sure authentication fails
-//    outContent.reset();
-//    String args2[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-2", "--d" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args2);
-//    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
-//
-//
-//    //    Test 3: Make sure authentication is successful
-//    outContent.reset();
-//    String args3[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-3", "--d" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args3);
-//    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
-//    assertThat(outContent.toString(), containsString("Unable to successfully bind to LDAP server with topology credentials"));
-//
-//    //    Test 4: Assert that we get a username/password not present error is printed
-//    outContent.reset();
-//    String args4[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-4" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args4);
-//    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemUsername is not present"));
-//    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemPassword is not present"));
-//
-//
-//    //    Test 5: Assert that we get a username/password not present error is printed
-//    outContent.reset();
-//    String args5[] = { "system-user-auth-test", "--master", "knox", "--cluster", "not-a-cluster" };
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args5);
-//    assertThat(outContent.toString(), containsString("Topology not-a-cluster does not exist"));
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliSysBindTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( KnoxCliSysBindTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    writeTopology(topoDir, "test-cluster-1.xml", "guest", "guest-password", true);
+    writeTopology(topoDir, "test-cluster-2.xml", "sam", "sam-password", true);
+    writeTopology(topoDir, "test-cluster-3.xml", "admin", "admin-password", true);
+    writeTopology(topoDir, "test-cluster-4.xml", "", "", false);
+
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void writeTopology(File topoDir, String name, String user, String pass, boolean goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+
+    if(goodTopology) {
+      createTopology(user, pass).toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+
+    stream.close();
+
+  }
+
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag( "name").addText( "urls./**")
+        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service")
+        .addTag("role").addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology(String username, String password) {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        .addTag("value").addText("uid=" + username + ",ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value").addText(password).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testLDAPAuth() throws Exception {
+    LOG_ENTER();
+
+    //    Test 1: Make sure authentication is successful
+    outContent.reset();
+    String args[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-1", "--d" };
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args);
+    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
+
+    //    Test 2: Make sure the system user bind is also successful for test-cluster-2
+    outContent.reset();
+    String args2[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-2", "--d" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args2);
+    assertThat(outContent.toString(), containsString("System LDAP Bind successful"));
+
+
+    //    Test 3: Make sure authentication fails with the test-cluster-3 credentials
+    outContent.reset();
+    String args3[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-3", "--d" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args3);
+    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
+    assertThat(outContent.toString(), containsString("Unable to successfully bind to LDAP server with topology credentials"));
+
+    //    Test 4: Assert that warnings are printed when the system username/password params are missing
+    outContent.reset();
+    String args4[] = { "system-user-auth-test", "--master", "knox", "--cluster", "test-cluster-4" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args4);
+    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemUsername is not present"));
+    assertThat(outContent.toString(), containsString("Warn: main.ldapRealm.contextFactory.systemPassword is not present"));
+
+
+    //    Test 5: Assert that an error is printed when the requested topology does not exist
+    outContent.reset();
+    String args5[] = { "system-user-auth-test", "--master", "knox", "--cluster", "not-a-cluster" };
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args5);
+    assertThat(outContent.toString(), containsString("Topology not-a-cluster does not exist"));
+
+    LOG_EXIT();
+  }
 
 
 }
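The testLDAPAuth() method above drives the system-user-auth-test KnoxCLI command and asserts on output that was captured by redirecting System.out into a ByteArrayOutputStream in setupSuite(). The standalone sketch below condenses that pattern. Only the calls that appear in the diff are reused (new KnoxCLI(), setConf(...), run(String[])); the two Knox import paths are assumptions, since the hunks above do not include the import section, GatewayConfigImpl stands in for the test's GatewayTestConfig instance, and the --master/--cluster values are simply copied from Test 1.

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    // Assumed package names -- not shown in the hunks above:
    import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
    import org.apache.hadoop.gateway.util.KnoxCLI;

    // Illustrative only: capture KnoxCLI output the way the test suite does.
    public class KnoxCliOutputCapture {
      public static void main(String[] args) throws Exception {
        ByteArrayOutputStream captured = new ByteArrayOutputStream();
        PrintStream original = System.out;
        System.setOut(new PrintStream(captured));
        try {
          KnoxCLI cli = new KnoxCLI();
          cli.setConf(new GatewayConfigImpl());
          cli.run(new String[]{ "system-user-auth-test", "--master", "knox",
                                "--cluster", "test-cluster-1", "--d" });
        } finally {
          System.setOut(original);
        }
        original.println("Captured CLI output:\n" + captured);
      }
    }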

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
index 039ae1c..b2f982b 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/OozieServiceDefinitionTest.java
@@ -47,166 +47,166 @@ import static org.hamcrest.xml.HasXPath.hasXPath;
 
 public class OozieServiceDefinitionTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForLiteralTemplateValuesBugKnox394() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-with-var.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "${appPath}/workflow.xml" ) ) );
-//
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForLiteralComplexTemplateValuesBugKnox394() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-with-complex-var.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "${nameNode}/user/${user.name}/${examplesRoot}/apps/hive" ) ) );
-//
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testOozieRewriteRulesForValuesRelativeToServiceRegistry() throws Exception {
-//    LOG_ENTER();
-//
-//    // This is a unique part of this test.
-//    String testResource = "oozie-request-relative.xml";
-//
-//    // Mock out the service url registry which is required for several url rewrite functions to work.
-//    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
-//    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
-//
-//    // Mock out the gateway services registry which is required for several url rewrite functions to work.
-//    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
-//    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
-//
-//    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
-//
-//    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
-//    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
-//    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
-//
-//    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
-//    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
-//    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
-//
-//    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
-//    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
-//    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
-//
-//    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
-//
-//    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
-//
-//    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
-//    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
-//    rulesReader.close();
-//
-//    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
-//
-//    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
-//
-//    InputStream stream = rewriteRequest.getInputStream();
-//
-//    Document document = TestUtils.parseXml( stream );
-//
-//    assertThat( document,
-//        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
-//            equalTo( "test-scheme://test-host:42/workflow.xml" ) ) );
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForLiteralTemplateValuesBugKnox394() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-with-var.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "${appPath}/workflow.xml" ) ) );
+
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForLiteralComplexTemplateValuesBugKnox394() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-with-complex-var.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "${nameNode}/user/${user.name}/${examplesRoot}/apps/hive" ) ) );
+
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testOozieRewriteRulesForValuesRelativeToServiceRegistry() throws Exception {
+    LOG_ENTER();
+
+    // This is a unique part of this test.
+    String testResource = "oozie-request-relative.xml";
+
+    // Mock out the service url registry which is required for several url rewrite functions to work.
+    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
+    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
+
+    // Mock out the gateway services registry which is required for several url rewrite functions to work.
+    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();
+
+    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
+
+    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
+    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
+    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();
+
+    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
+    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
+    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();
+
+    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
+    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
+    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();
+
+    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );
+
+    UrlRewriteEnvironment rewriteEnvironment = new UrlRewriteServletEnvironment( servletContext );
+
+    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
+    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
+    rulesReader.close();
+
+    rewriteProcessor.initialize( rewriteEnvironment, rewriteRules );
+
+    UrlRewriteRequest rewriteRequest = new UrlRewriteRequest( filterConfig, servletRequest );
+
+    InputStream stream = rewriteRequest.getInputStream();
+
+    Document document = TestUtils.parseXml( stream );
+
+    assertThat( document,
+        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value",
+            equalTo( "test-scheme://test-host:42/workflow.xml" ) ) );
+
+    LOG_EXIT();
+  }
 
 }

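The three re-enabled KNOX-394 tests above differ only in the request resource fed through the Oozie rewrite rules and in the value expected back: literal Oozie EL templates such as ${appPath} and ${nameNode}/user/${user.name}/${examplesRoot}/apps/hive must pass through untouched, while a value relative to the registered NAMENODE URL is rewritten to test-scheme://test-host:42/workflow.xml. As a reading aid only, here is a hypothetical helper (not part of this patch; it reuses the classes and calls already shown in the diff and assumes the same imports as the test class) that would collapse the shared arrange/act/assert steps:

  private void assertOozieAppPathValue( String testResource, String expectedValue ) throws Exception {
    // Mock the service registry and gateway services exactly as the tests above do.
    ServiceRegistry registry = EasyMock.createNiceMock( ServiceRegistry.class );
    EasyMock.expect( registry.lookupServiceURL( "test-cluster", "NAMENODE" ) ).andReturn( "test-scheme://test-host:42" ).anyTimes();
    GatewayServices services = EasyMock.createNiceMock( GatewayServices.class );
    EasyMock.expect( services.getService( GatewayServices.SERVICE_REGISTRY_SERVICE ) ).andReturn( registry ).anyTimes();

    UrlRewriteProcessor rewriteProcessor = new UrlRewriteProcessor();
    ServletContext servletContext = EasyMock.createNiceMock( ServletContext.class );
    EasyMock.expect( servletContext.getAttribute( UrlRewriteServletContextListener.PROCESSOR_ATTRIBUTE_NAME ) ).andReturn( rewriteProcessor ).anyTimes();
    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_SERVICES_ATTRIBUTE ) ).andReturn( services ).anyTimes();
    EasyMock.expect( servletContext.getAttribute( GatewayServices.GATEWAY_CLUSTER_ATTRIBUTE ) ).andReturn( "test-cluster" ).anyTimes();

    HttpServletRequest servletRequest = EasyMock.createNiceMock( HttpServletRequest.class );
    EasyMock.expect( servletRequest.getInputStream() ).andReturn( new MockServletInputStream( TestUtils.getResourceStream( OozieServiceDefinitionTest.class, testResource ) ) ).anyTimes();
    EasyMock.expect( servletRequest.getContentType() ).andReturn( "text/xml" ).anyTimes();

    FilterConfig filterConfig = EasyMock.createNiceMock( FilterConfig.class );
    EasyMock.expect( filterConfig.getServletContext() ).andReturn( servletContext ).anyTimes();
    EasyMock.expect( filterConfig.getInitParameter( UrlRewriteServletFilter.REQUEST_BODY_FILTER_PARAM ) ).andReturn( "OOZIE/oozie/configuration" ).anyTimes();

    EasyMock.replay( registry, services, servletContext, servletRequest, filterConfig );

    // Load the real Oozie rewrite rules and push the request body through them.
    Reader rulesReader = TestUtils.getResourceReader( "services/oozie/4.0.0/rewrite.xml", "UTF-8" );
    UrlRewriteRulesDescriptor rewriteRules = UrlRewriteRulesDescriptorFactory.load( "xml", rulesReader );
    rulesReader.close();
    rewriteProcessor.initialize( new UrlRewriteServletEnvironment( servletContext ), rewriteRules );

    Document document = TestUtils.parseXml( new UrlRewriteRequest( filterConfig, servletRequest ).getInputStream() );
    assertThat( document,
        hasXPath( "/configuration/property[name='oozie.wf.application.path']/value", equalTo( expectedValue ) ) );
  }

Each of the three tests would then reduce to a single call, e.g. assertOozieAppPathValue( "oozie-request-relative.xml", "test-scheme://test-host:42/workflow.xml" ).
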
http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
index e0af24e..7823978 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
@@ -42,438 +42,438 @@ import static org.hamcrest.CoreMatchers.is;
 @Category({FunctionalTests.class, MediumTests.class})
 public class WebHdfsHaFuncTest {
 
-//   private static final long SHORT_TIMEOUT = 1000L;
-//   private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
-//
-//   // Specifies if the test requests should go through the gateway or directly to the services.
-//   // This is frequently used to verify the behavior of the test both with and without the gateway.
-//   private static final boolean USE_GATEWAY = true;
-//
-//   // Specifies if the test requests should be sent to mock services or the real services.
-//   // This is frequently used to verify the behavior of the test both with and without mock services.
-//   private static final boolean USE_MOCK_SERVICES = true;
-//
-//   private static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
-//
-//   private static MockServer masterServer;
-//
-//   private static MockServer standbyServer;
-//
-//   private static int findFreePort() throws IOException {
-//      ServerSocket socket = new ServerSocket(0);
-//      int port = socket.getLocalPort();
-//      socket.close();
-//      return port;
-//   }
-//
-//   /**
-//    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
-//    * registry of sorts for all of the services that will be used by the test methods.
-//    * The createTopology method is used to create the topology file that would normally be read from disk.
-//    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
-//    * <p/>
-//    * This would normally be done once for this suite but the failure tests start affecting each other depending
-//    * on the state the last 'active' url
-//    *
-//    * @throws Exception Thrown if any failure occurs.
-//    */
-//   @Before
-//   public void setup() throws Exception {
-//      LOG_ENTER();
-//      //Log.setLog(new NoOpLogger());
-//      masterServer = new MockServer("master", true);
-//      standbyServer = new MockServer("standby", true);
-//      GatewayTestConfig config = new GatewayTestConfig();
-//      config.setGatewayPath("gateway");
-//      driver.setResourceBase(WebHdfsHaFuncTest.class);
-//      driver.setupLdap(findFreePort());
-//      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
-//      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
-//      LOG_EXIT();
-//   }
-//
-//   @After
-//   public void cleanup() throws Exception {
-//      LOG_ENTER();
-//      driver.cleanup();
-//      driver.reset();
-//      masterServer.reset();
-//      standbyServer.reset();
-//      LOG_EXIT();
-//   }
-//
-//   /**
-//    * Creates a topology that is deployed to the gateway instance for the test suite.
-//    * Note that this topology is shared by all of the test methods in this suite.
-//    *
-//    * @return A populated XML structure for a topology file.
-//    */
-//   private static XMLTag createTopology() {
-//      XMLTag xml = XMLDoc.newDocument(true)
-//            .addRoot("topology")
-//            .addTag("gateway")
-//            .addTag("provider")
-//            .addTag("role").addText("webappsec")
-//            .addTag("name").addText("WebAppSec")
-//            .addTag("enabled").addText("true")
-//            .addTag("param")
-//            .addTag("name").addText("csrf.enabled")
-//            .addTag("value").addText("true").gotoParent().gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("authentication")
-//            .addTag("name").addText("ShiroProvider")
-//            .addTag("enabled").addText("true")
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm")
-//            .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//            .addTag("value").addText("simple").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("urls./**")
-//            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("identity-assertion")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("Default").gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("authorization")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("AclsAuthz").gotoParent()
-//            .addTag("param")
-//            .addTag("name").addText("webhdfs-acl")
-//            .addTag("value").addText("hdfs;*;*").gotoParent()
-//            .addTag("provider")
-//            .addTag("role").addText("ha")
-//            .addTag("enabled").addText("true")
-//            .addTag("name").addText("HaProvider")
-//            .addTag("param")
-//            .addTag("name").addText("WEBHDFS")
-//            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
-//            .gotoRoot()
-//            .addTag("service")
-//            .addTag("role").addText("WEBHDFS")
-//            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
-//            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
-//            .gotoRoot();
-////     System.out.println( "GATEWAY=" + xml.toString() );
-//      return xml;
-//   }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testBasicListOperation() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-//            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   @Ignore( "KNOX-446" )
-//   public void testFailoverListOperation() throws Exception {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //Shutdown master and expect standby to serve the list response
-//      masterServer.stop();
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-//            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      standbyServer.isEmpty();
-//      masterServer.start();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testFailoverLimit() throws Exception {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //Shutdown master and expect standby to serve the list response
-//      masterServer.stop();
-//      standbyServer.stop();
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      standbyServer.start();
-//      masterServer.start();
-//      LOG_EXIT();
-//   }
-//
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   @Ignore( "KNOX-446" )
-//   public void testServerInStandby() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //make master the server that is in standby
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      //standby server is 'active' in this test case and serves the list response
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
+   private static final long SHORT_TIMEOUT = 1000L;
+   private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
+
+   // Specifies if the test requests should go through the gateway or directly to the services.
+   // This is frequently used to verify the behavior of the test both with and without the gateway.
+   private static final boolean USE_GATEWAY = true;
+
+   // Specifies if the test requests should be sent to mock services or the real services.
+   // This is frequently used to verify the behavior of the test both with and without mock services.
+   private static final boolean USE_MOCK_SERVICES = true;
+
+   private static GatewayFuncTestDriver driver = new GatewayFuncTestDriver();
+
+   private static MockServer masterServer;
+
+   private static MockServer standbyServer;
+
+   private static int findFreePort() throws IOException {
+      ServerSocket socket = new ServerSocket(0);
+      int port = socket.getLocalPort();
+      socket.close();
+      return port;
+   }
+
+   /**
+    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
+    * registry of sorts for all of the services that will be used by the test methods.
+    * The createTopology method is used to create the topology file that would normally be read from disk.
+    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
+    * <p/>
+    * This would normally be done once for this suite, but the failure tests start affecting each other depending
+    * on the state of the last 'active' URL.
+    *
+    * @throws Exception Thrown if any failure occurs.
+    */
+   @Before
+   public void setup() throws Exception {
+      LOG_ENTER();
+      //Log.setLog(new NoOpLogger());
+      masterServer = new MockServer("master", true);
+      standbyServer = new MockServer("standby", true);
+      GatewayTestConfig config = new GatewayTestConfig();
+      config.setGatewayPath("gateway");
+      driver.setResourceBase(WebHdfsHaFuncTest.class);
+      driver.setupLdap(findFreePort());
+      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
+      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
+      LOG_EXIT();
+   }
+
+   @After
+   public void cleanup() throws Exception {
+      LOG_ENTER();
+      driver.cleanup();
+      driver.reset();
+      masterServer.reset();
+      standbyServer.reset();
+      LOG_EXIT();
+   }
+
+   /**
+    * Creates a topology that is deployed to the gateway instance for the test suite.
+    * Note that this topology is shared by all of the test methods in this suite.
+    *
+    * @return A populated XML structure for a topology file.
+    */
+   private static XMLTag createTopology() {
+      XMLTag xml = XMLDoc.newDocument(true)
+            .addRoot("topology")
+            .addTag("gateway")
+            .addTag("provider")
+            .addTag("role").addText("webappsec")
+            .addTag("name").addText("WebAppSec")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("csrf.enabled")
+            .addTag("value").addText("true").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authentication")
+            .addTag("name").addText("ShiroProvider")
+            .addTag("enabled").addText("true")
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm")
+            .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.userDnTemplate")
+            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.url")
+            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
+            .addTag("param")
+            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+            .addTag("value").addText("simple").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("urls./**")
+            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("identity-assertion")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("Default").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("authorization")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("AclsAuthz").gotoParent()
+            .addTag("param")
+            .addTag("name").addText("webhdfs-acl")
+            .addTag("value").addText("hdfs;*;*").gotoParent()
+            .addTag("provider")
+            .addTag("role").addText("ha")
+            .addTag("enabled").addText("true")
+            .addTag("name").addText("HaProvider")
+            .addTag("param")
+            .addTag("name").addText("WEBHDFS")
+            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
+            .gotoRoot()
+            .addTag("service")
+            .addTag("role").addText("WEBHDFS")
+            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
+            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
+            .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+      return xml;
+   }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+   public void testBasicListOperation() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   @Ignore( "KNOX-446" )
+   public void testFailoverListOperation() throws Exception {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //Shutdown master and expect standby to serve the list response
+      masterServer.stop();
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      standbyServer.isEmpty();
+      masterServer.start();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testFailoverLimit() throws Exception {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //Shutdown master and expect standby to serve the list response
+      masterServer.stop();
+      standbyServer.stop();
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      standbyServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInStandbyFailoverLimit() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //make master the server that is in standby
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      standbyServer.expect()
-//            .method("GET")
-//            .pathInfo("/webhdfs/v1/")
-//            .queryParam("op", "LISTSTATUS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "LISTSTATUS")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
-//      masterServer.isEmpty();
-//      standbyServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeMode() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .expect()
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      standbyServer.start();
+      masterServer.start();
+      LOG_EXIT();
+   }
+
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   @Ignore( "KNOX-446" )
+   public void testServerInStandby() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //make master the server that is in standby
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      //standby server is 'active' in this test case and serves the list response
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      standbyServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInStandbyFailoverLimit() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //make master the server that is in standby
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      standbyServer.expect()
+            .method("GET")
+            .pathInfo("/webhdfs/v1/")
+            .queryParam("op", "LISTSTATUS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "LISTSTATUS")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("boolean", is(true))
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeModeRetriableException() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/new")
-//            .queryParam("op", "MKDIRS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/new")
-//            .queryParam("op", "MKDIRS")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_OK)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "MKDIRS")
-//            .expect()
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
+      masterServer.isEmpty();
+      standbyServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeMode() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("boolean", is(true))
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeModeRetriableException() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/new")
+            .queryParam("op", "MKDIRS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/new")
+            .queryParam("op", "MKDIRS")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_OK)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "MKDIRS")
+            .expect()
+            .log().ifError()
+            .statusCode(HttpStatus.SC_OK)
+            .content("boolean", is(true))
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
+
+   @Test( timeout = MEDIUM_TIMEOUT )
+   public void testServerInSafeModeRetryLimit() throws IOException {
+      LOG_ENTER();
+      String username = "hdfs";
+      String password = "hdfs-password";
+      //master is in safe mode
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      masterServer.expect()
+            .method("POST")
+            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .queryParam("user.name", username)
+            .respond()
+            .status(HttpStatus.SC_FORBIDDEN)
+            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
+            .contentType("application/json");
+      given()
+            .auth().preemptive().basic(username, password)
+            .header("X-XSRF-Header", "jksdhfkhdsf")
+            .queryParam("op", "RENAME")
+            .queryParam("destination", "/user/hdfs/foo.txt")
+            .expect()
 //            .log().ifError()
-//            .statusCode(HttpStatus.SC_OK)
-//            .content("boolean", is(true))
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
-//
-//   @Test( timeout = MEDIUM_TIMEOUT )
-//   public void testServerInSafeModeRetryLimit() throws IOException {
-//      LOG_ENTER();
-//      String username = "hdfs";
-//      String password = "hdfs-password";
-//      //master is in safe mode
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      masterServer.expect()
-//            .method("POST")
-//            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .queryParam("user.name", username)
-//            .respond()
-//            .status(HttpStatus.SC_FORBIDDEN)
-//            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
-//            .contentType("application/json");
-//      given()
-//            .auth().preemptive().basic(username, password)
-//            .header("X-XSRF-Header", "jksdhfkhdsf")
-//            .queryParam("op", "RENAME")
-//            .queryParam("destination", "/user/hdfs/foo.txt")
-//            .expect()
-////            .log().ifError()
-//            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
-//            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
-//      masterServer.isEmpty();
-//      LOG_EXIT();
-//   }
+            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
+            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
+      masterServer.isEmpty();
+      LOG_EXIT();
+   }
 }


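A note on the HA tests re-enabled above: the HaProvider parameter (maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true) is what dictates how many mock responses each failure-path test has to queue. testServerInStandbyFailoverLimit, for example, queues four standby-style 403 responses, roughly one for the initial attempt plus one per allowed failover, before expecting the gateway to give up with a 500. A hypothetical helper (not in this patch; it only reuses the MockServer and driver calls already shown above) would make that relationship explicit:

   // Queue 'attempts' LISTSTATUS responses that all report the namenode as being in standby.
   private static void queueStandbyListStatus( MockServer server, String username, int attempts ) {
      for( int i = 0; i < attempts; i++ ) {
         server.expect()
               .method( "GET" )
               .pathInfo( "/webhdfs/v1/" )
               .queryParam( "op", "LISTSTATUS" )
               .queryParam( "user.name", username )
               .respond()
               .status( HttpStatus.SC_FORBIDDEN )
               .content( driver.getResourceBytes( "webhdfs-liststatus-standby.json" ) )
               .contentType( "application/json" );
      }
   }

With that, the four repeated expect blocks in testServerInStandbyFailoverLimit could become queueStandbyListStatus( masterServer, username, 2 ) plus queueStandbyListStatus( standbyServer, username, 2 ).
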
[3/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Re-enabled remainder of func tests.

Posted by km...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
index bd0b548..29f60b5 100755
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
@@ -68,265 +68,265 @@ import com.mycila.xmltool.XMLTag;
  */
 public class Knox242FuncTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = Knox242FuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( Knox242FuncTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    //appenders = NoOpAppender.setUp();
-//    int port = setupLdap();
-//    setupGateway(port);
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    gateway.stop();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static int setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//    return port;
-//  }
-//
-//  public static void setupGateway(int ldapPort) throws IOException, Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "testdg-cluster.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/testdg-cluster";
-//
-//    GatewayServices services = GatewayServer.getGatewayServices();
-//    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
-//    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
-//
-//    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
-//    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
-//
-//    descriptor = new File( topoDir, "testdg-cluster.xml" );
-//    stream = new FileOutputStream( descriptor );
-//    createTopology(ldapPort).toStream( stream );
-//    stream.close();
-//
-//    try {
-//      Thread.sleep(5000);
-//    } catch (Exception e) {
-//
-//    }
-//  }
-//
-//  private static XMLTag createTopology(int ldapPort) {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
-//        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
-//        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
-//
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userSearchAttributeName" )
-//        .addTag( "value" ).addText( "uid" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userObjectClass" )
-//        .addTag( "value" ).addText( "person" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userSearchBase" )
-//        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.groupSearchBase" )
-//        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
-//
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
-//        .addTag( "value" ).addText( "true" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
-//        .addTag( "value" ).addText( "groupofurls" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
-//        .addTag( "value" ).addText( "memberurl" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
-//        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
-//        .addTag( "value" ).addText( "testdg-cluster" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
-//        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
-//        // .addTag( "value" ).addText( "guest-password" )
-//        .gotoParent().addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "authorization" )
-//        .addTag( "name" ).addText( "AclsAuthz" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "test-service-role.acl" )
-//        .addTag( "value" ).addText( "*;directors;*" )
-//
-//        .gotoParent().gotoParent().addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//         // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  @Ignore
-//  // @Test
-//  public void waitForManualTesting() throws IOException {
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testGroupMember() throws ClassNotFoundException, Exception {
-//    LOG_ENTER();
-//    String username = "joe";
-//    String password = "joe-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_OK )
-//        .contentType( "text/plain" )
-//        .body( is( "test-service-response" ) )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testNonGroupMember() throws ClassNotFoundException {
-//    LOG_ENTER();
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .expect()
-//        //.log().all()
-//        .statusCode( HttpStatus.SC_FORBIDDEN )
-//        .when().get( serviceUrl );
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 10 * SHORT_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = Knox242FuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( Knox242FuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    //appenders = NoOpAppender.setUp();
+    int port = setupLdap();
+    setupGateway(port);
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static int setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+    return port;
+  }
+
+  public static void setupGateway(int ldapPort) throws IOException, Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "testdg-cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/testdg-cluster";
+
+    GatewayServices services = GatewayServer.getGatewayServices();
+    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
+    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
+
+    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
+    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
+
+    descriptor = new File( topoDir, "testdg-cluster.xml" );
+    stream = new FileOutputStream( descriptor );
+    createTopology(ldapPort).toStream( stream );
+    stream.close();
+
+    try {
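+      // Fixed pause, presumably to give the gateway time to redeploy the rewritten topology; interruption is simply ignored.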
+      Thread.sleep(5000);
+    } catch (Exception e) {
+
+    }
+  }
+
+  private static XMLTag createTopology(int ldapPort) {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
+        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:"  + ldapPort)
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
+        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
+
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userSearchAttributeName" )
+        .addTag( "value" ).addText( "uid" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userObjectClass" )
+        .addTag( "value" ).addText( "person" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userSearchBase" )
+        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.groupSearchBase" )
+        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
+
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
+        .addTag( "value" ).addText( "true" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
+        .addTag( "value" ).addText( "simple" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
+        .addTag( "value" ).addText( "groupofurls" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
+        .addTag( "value" ).addText( "memberurl" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
+        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
+        .addTag( "value" ).addText( "testdg-cluster" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
+        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
+        // .addTag( "value" ).addText( "guest-password" )
+        .gotoParent().addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "name" ).addText( "AclsAuthz" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "test-service-role.acl" )
+        .addTag( "value" ).addText( "*;directors;*" )
+
+        .gotoParent().gotoParent().addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+         // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  @Ignore
+  // @Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testGroupMember() throws ClassNotFoundException, Exception {
+    LOG_ENTER();
+    String username = "joe";
+    String password = "joe-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_OK )
+        .contentType( "text/plain" )
+        .body( is( "test-service-response" ) )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testNonGroupMember() throws ClassNotFoundException {
+    LOG_ENTER();
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/test-service-path/test-service-resource";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .expect()
+        //.log().all()
+        .statusCode( HttpStatus.SC_FORBIDDEN )
+        .when().get( serviceUrl );
+    LOG_EXIT();
+  }
   
 }
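
The re-enabled setupGateway() above rewrites the testdg-cluster topology and then sleeps a fixed five seconds so the gateway has time to redeploy it. As an illustrative alternative only (not part of this commit; the helper name waitForDeployment is hypothetical), the suite could instead poll the cluster URL until the gateway answers, using nothing beyond the JDK:

    private static boolean waitForDeployment( String url, long timeoutMillis ) throws InterruptedException {
      long deadline = System.currentTimeMillis() + timeoutMillis;
      while ( System.currentTimeMillis() < deadline ) {
        try {
          java.net.HttpURLConnection conn =
              (java.net.HttpURLConnection) new java.net.URL( url ).openConnection();
          conn.setConnectTimeout( 1000 );
          conn.setReadTimeout( 1000 );
          conn.getResponseCode(); // Any status (200, 401, 403, ...) means the listener is serving the topology.
          conn.disconnect();
          return true;
        } catch ( java.io.IOException e ) {
          Thread.sleep( 250 ); // Not deployed yet; retry shortly.
        }
      }
      return false;
    }

A call such as waitForDeployment( clusterUrl + "/test-service-path/test-service-resource", 30000 ) would then replace the fixed Thread.sleep(5000).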

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
index 10ab41d..a79e613 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
@@ -53,284 +53,284 @@ import static org.junit.Assert.assertThat;
 
 public class KnoxCliLdapFuncTestNegative {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//
-//  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
-//  private static Logger LOG = LoggerFactory.getLogger( KnoxCliLdapFuncTestPositive.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayTestConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
-//  private static final String uuid = UUID.randomUUID().toString();
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    System.setOut(new PrintStream(outContent));
-//    System.setErr(new PrintStream(errContent));
-//    setupLdap();
-//    setupGateway();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    ldap.stop( true );
-//
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap( ) throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    createTopology(topoDir, "test-cluster.xml", true);
-//    createTopology(topoDir, "bad-cluster.xml", false);
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//  }
-//
-//  private static void createTopology(File topoDir, String name, boolean goodTopology) throws Exception {
-//    File descriptor = new File(topoDir, name);
-//
-//    if(descriptor.exists()){
-//      descriptor.delete();
-//      descriptor = new File(topoDir, name);
-//    }
-//
-//    FileOutputStream stream = new FileOutputStream( descriptor, false );
-//    if(goodTopology){
-//      createTopology().toStream( stream );
-//    } else {
-//      createBadTopology().toStream( stream );
-//    }
-//    stream.close();
-//
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  private static XMLTag createBadTopology(){
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag("gateway")
-//        .addTag( "provider" )
-//        .addTag("role").addText("authentication")
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag("param")
-//        .addTag( "name" ).addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag("param")
-//        .addTag( "name" ).addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag( "name" ).addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
-//        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
-//        .addTag( "value" ).addText("guest-password").gotoParent()
-//        .addTag("param")
-//        .addTag( "name" ).addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag( "name" ).addText("urls./**")
-//        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
-//        .addTag("provider")
-//        .addTag( "role" ).addText("identity-assertion")
-//        .addTag("enabled").addText("true")
-//        .addTag("name").addText("Default").gotoParent()
-//        .addTag("provider")
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static XMLTag createTopology() {
-//
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag("gateway" )
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapGroupContextFactory")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.searchBase")
-//        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.groupObjectClass")
-//        .addTag("value").addText("groupOfNames").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapRealm.memberAttribute")
-//        .addTag("value").addText("member").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
-//        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
-//        .addTag( "value" ).addText("guest-password").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.cachingEnabled")
-//        .addTag("value").addText("false").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("com.sun.jndi.ldap.connect.pool")
-//        .addTag("value").addText("false").gotoParent()
-//        .addTag("param")
-//        .addTag("name" ).addText("urls./**")
-//        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
-//        .addTag("provider" )
-//        .addTag("role").addText( "identity-assertion" )
-//        .addTag( "enabled").addText( "true" )
-//        .addTag("name").addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  @Test( timeout = SHORT_TIMEOUT )
-//  public void testBadTopology() throws Exception {
-//    LOG_ENTER();
-//
-//    //    Test 4: Authenticate a user with a bad topology configured with nothing required for group lookup in the topology
-//    outContent.reset();
-//    String username = "tom";
-//    String password = "tom-password";
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(config);
-//
-//    String args1[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
-//        "--u", username, "--p", password, "--g" };
-//    cli.run( args1 );
-//
-//    assertThat(outContent.toString(), containsString("LDAP authentication successful"));
-//    assertThat(outContent.toString(), containsString("Your topology file may be incorrectly configured for group lookup"));
-//    assertThat(outContent.toString(), containsString("Warn: "));
-//    assertFalse(outContent.toString().contains("analyst"));
-//
-//
-//    outContent.reset();
-//    username = "bad-name";
-//    password = "bad-password";
-//    cli = new KnoxCLI();
-//    cli.setConf( config );
-//
-//    String args2[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
-//        "--u", username, "--p", password, "--g" };
-//    cli.run( args2 );
-//
-//    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
-//    assertThat(outContent.toString(), containsString("INVALID_CREDENTIALS"));
-//
-//    outContent.reset();
-//    username = "sam";
-//    password = "sam-password";
-//    cli = new KnoxCLI();
-//    cli.setConf( config );
-//
-//    String args3[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
-//        "--u", username, "--p", password, "--g" };
-//    cli.run( args3 );
-//
-//    assertThat(outContent.toString(), containsString("LDAP authentication successful"));
-//    assertThat(outContent.toString(), containsString("Your topology file may be incorrectly configured for group lookup"));
-//    assertThat(outContent.toString(), containsString("Warn:"));
-//    assertFalse(outContent.toString().contains("analyst"));
-//    assertFalse(outContent.toString().contains("scientist"));
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
+  private static Logger LOG = LoggerFactory.getLogger( KnoxCliLdapFuncTestPositive.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    createTopology(topoDir, "test-cluster.xml", true);
+    createTopology(topoDir, "bad-cluster.xml", false);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void createTopology(File topoDir, String name, boolean goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+    if(goodTopology){
+      createTopology().toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+    stream.close();
+
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag( "provider" )
+        .addTag("role").addText("authentication")
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value" ).addText("guest-password").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("urls./**")
+        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText("identity-assertion")
+        .addTag("enabled").addText("true")
+        .addTag("name").addText("Default").gotoParent()
+        .addTag("provider")
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology() {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value" ).addText("guest-password").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.cachingEnabled")
+        .addTag("value").addText("false").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("com.sun.jndi.ldap.connect.pool")
+        .addTag("value").addText("false").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test( timeout = SHORT_TIMEOUT )
+  public void testBadTopology() throws Exception {
+    LOG_ENTER();
+
+    //    Test 4: Authenticate a user with a bad topology that lacks the configuration required for group lookup
+    outContent.reset();
+    String username = "tom";
+    String password = "tom-password";
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(config);
+
+    String args1[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args1 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication successful"));
+    assertThat(outContent.toString(), containsString("Your topology file may be incorrectly configured for group lookup"));
+    assertThat(outContent.toString(), containsString("Warn: "));
+    assertFalse(outContent.toString().contains("analyst"));
+
+
+    outContent.reset();
+    username = "bad-name";
+    password = "bad-password";
+    cli = new KnoxCLI();
+    cli.setConf( config );
+
+    String args2[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args2 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
+    assertThat(outContent.toString(), containsString("INVALID_CREDENTIALS"));
+
+    outContent.reset();
+    username = "sam";
+    password = "sam-password";
+    cli = new KnoxCLI();
+    cli.setConf( config );
+
+    String args3[] = {"user-auth-test", "--master", "knox", "--cluster", "bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args3 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication successful"));
+    assertThat(outContent.toString(), containsString("Your topology file may be incorrectly configured for group lookup"));
+    assertThat(outContent.toString(), containsString("Warn:"));
+    assertFalse(outContent.toString().contains("analyst"));
+    assertFalse(outContent.toString().contains("scientist"));
+
+    LOG_EXIT();
+  }
 
 }
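
The re-enabled suite above redirects System.out and System.err into in-memory buffers in setupSuite() so that KnoxCLI output can be asserted on, but the original streams are never restored in cleanupSuite(). A minimal save-and-restore sketch (illustrative only, not part of this commit; the field and method names are hypothetical):

    private static final java.io.PrintStream ORIGINAL_OUT = System.out;
    private static final java.io.PrintStream ORIGINAL_ERR = System.err;

    private static void captureStreams( java.io.ByteArrayOutputStream out, java.io.ByteArrayOutputStream err ) {
      // Redirect stdout/stderr so CLI output lands in the buffers the assertions read.
      System.setOut( new java.io.PrintStream( out ) );
      System.setErr( new java.io.PrintStream( err ) );
    }

    private static void restoreStreams() {
      // Put the original streams back so later suites in the same JVM log normally.
      System.setOut( ORIGINAL_OUT );
      System.setErr( ORIGINAL_ERR );
    }

Calling restoreStreams() from cleanupSuite() keeps the captured output from hiding log messages of suites that run afterwards.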

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
index 54da5f5..fb08531 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
@@ -53,288 +53,288 @@ import static org.junit.Assert.assertThat;
 
 public class KnoxCliLdapFuncTestPositive {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//
-//  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
-//  private static Logger LOG = LoggerFactory.getLogger( KnoxCliLdapFuncTestPositive.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayTestConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
-//  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
-//  private static final String uuid = UUID.randomUUID().toString();
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    LOG_ENTER();
-//    System.setOut(new PrintStream(outContent));
-//    System.setErr(new PrintStream(errContent));
-//    setupLdap();
-//    setupGateway();
-//    LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    LOG_ENTER();
-//    ldap.stop( true );
-//
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    LOG_EXIT();
-//  }
-//
-//  public static void setupLdap( ) throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    createTopology(topoDir, "test-cluster.xml", true);
-//    createTopology(topoDir, "bad-cluster.xml", false);
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//  }
-//
-//  private static void createTopology(File topoDir, String name, boolean goodTopology) throws Exception {
-//    File descriptor = new File(topoDir, name);
-//
-//    if(descriptor.exists()){
-//      descriptor.delete();
-//      descriptor = new File(topoDir, name);
-//    }
-//
-//    FileOutputStream stream = new FileOutputStream( descriptor, false );
-//    if(goodTopology){
-//      createTopology().toStream( stream );
-//    } else {
-//      createBadTopology().toStream( stream );
-//    }
-//    stream.close();
-//
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  private static XMLTag createBadTopology(){
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag( "gateway" )
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag( "param" )
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag( "name").addText( "urls./**")
-//        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service")
-//        .addTag("role").addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static XMLTag createTopology() {
-//
-//    XMLTag xml = XMLDoc.newDocument(true)
-//        .addRoot("topology")
-//        .addTag("gateway")
-//        .addTag("provider")
-//        .addTag("role").addText("authentication")
-//        .addTag("name").addText("ShiroProvider")
-//        .addTag("enabled").addText("true")
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapGroupContextFactory")
-//        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.searchBase")
-//        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.groupObjectClass")
-//        .addTag("value").addText("groupOfNames").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param" )
-//        .addTag("name").addText("main.ldapRealm.memberAttribute")
-//        .addTag("value").addText("member").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
-//        .addTag("value").addText("true").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
-//        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
-//        .addTag( "value" ).addText("guest-password").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.userDnTemplate")
-//        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.url")
-//        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
-//        .addTag("param")
-//        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
-//        .addTag("value").addText("simple").gotoParent()
-//        .addTag("param")
-//        .addTag("name" ).addText("urls./**")
-//        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
-//        .addTag("provider" )
-//        .addTag("role").addText( "identity-assertion" )
-//        .addTag( "enabled").addText( "true" )
-//        .addTag("name").addText( "Default" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "test-service-role" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  @Test( timeout = SHORT_TIMEOUT )
-//  public void testLDAPAuth() throws Exception {
-//    LOG_ENTER();
-//
-////    Test 1: Make sure authenication is successful and return groups
-//    outContent.reset();
-//    String username = "sam";
-//    String password = "sam-password";
-//    String args[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster", "--u", username, "--p", password,
-//        "--g"};
-//    KnoxCLI cli = new KnoxCLI();
-//    cli.setConf(config);
-//    cli.run(args);
-//    assertThat(outContent.toString(), containsString("success"));
-//    assertThat(outContent.toString(), containsString("analyst"));
-//    assertThat(outContent.toString(), containsString("scientist"));
-//
-////    Test 2: Give an invalid name and password combinatinon.
-//    outContent.reset();
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    username = "bad-name";
-//    password = "bad-password";
-//    String args2[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster", "--u", username, "--p", password};
-//    cli.run(args2);
-//    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
-//
-////    Test 3: Authenticate a user who belongs to no groups, but specify groups with --g
-//    outContent.reset();
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    username = "guest";
-//    password = "guest-password";
-//    String args3[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster",
-//        "--u", username, "--p", password, "--g" };
-//    cli.run(args3);
-//    assertThat(outContent.toString(), containsString("LDAP authentication success"));
-//    assertThat(outContent.toString(), containsString("does not belong to any groups"));
-//
-//    //    Test 4: Pass a non-existent topology
-//    outContent.reset();
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    username = "guest";
-//    password = "guest-password";
-//    String args4[] = {"user-auth-test", "--master", "knox", "--cluster", "cluster-dne",
-//        "--u", username, "--p", password };
-//    cli.run(args4);
-//    assertThat(outContent.toString(), containsString("Topology cluster-dne does not exist"));
-//
-//
-//    //    Test 5: Authenticate a user who belongs to no groups, but specify groups with --g
-//    outContent.reset();
-//    cli = new KnoxCLI();
-//    cli.setConf(config);
-//    username = "guest";
-//    password = "guest-password";
-//    String args5[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster",
-//        "--u", username, "--p", password };
-//    cli.run( args5 );
-//    assertThat(outContent.toString(), containsString("LDAP authentication success"));
-//    assertThat(outContent.toString(), not(containsString("does not belong to any groups")));
-//
-//    LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
+  private static Logger LOG = LoggerFactory.getLogger( KnoxCliLdapFuncTestPositive.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    LOG_ENTER();
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+    LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    LOG_ENTER();
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    LOG_EXIT();
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    createTopology(topoDir, "test-cluster.xml", true);
+    createTopology(topoDir, "bad-cluster.xml", false);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void createTopology(File topoDir, String name, boolean goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+    if(goodTopology){
+      createTopology().toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+    stream.close();
+
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag( "name").addText( "urls./**")
+        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service")
+        .addTag("role").addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology() {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        .addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        .addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value" ).addText("guest-password").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test( timeout = SHORT_TIMEOUT )
+  public void testLDAPAuth() throws Exception {
+    LOG_ENTER();
+
+//    Test 1: Make sure authentication is successful and returns groups
+    outContent.reset();
+    String username = "sam";
+    String password = "sam-password";
+    String args[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster", "--u", username, "--p", password,
+        "--g"};
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args);
+    assertThat(outContent.toString(), containsString("success"));
+    assertThat(outContent.toString(), containsString("analyst"));
+    assertThat(outContent.toString(), containsString("scientist"));
+
+//    Test 2: Give an invalid name and password combination.
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "bad-name";
+    password = "bad-password";
+    String args2[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster", "--u", username, "--p", password};
+    cli.run(args2);
+    assertThat(outContent.toString(), containsString("LDAP authentication failed"));
+
+//    Test 3: Authenticate a user who belongs to no groups, but request group listing with --g
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "guest";
+    password = "guest-password";
+    String args3[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run(args3);
+    assertThat(outContent.toString(), containsString("LDAP authentication success"));
+    assertThat(outContent.toString(), containsString("does not belong to any groups"));
+
+    //    Test 4: Pass a non-existent topology
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "guest";
+    password = "guest-password";
+    String args4[] = {"user-auth-test", "--master", "knox", "--cluster", "cluster-dne",
+        "--u", username, "--p", password };
+    cli.run(args4);
+    assertThat(outContent.toString(), containsString("Topology cluster-dne does not exist"));
+
+
+    //    Test 5: Authenticate a user who belongs to no groups, without requesting groups (no --g)
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "guest";
+    password = "guest-password";
+    String args5[] = {"user-auth-test", "--master", "knox", "--cluster", "test-cluster",
+        "--u", username, "--p", password };
+    cli.run( args5 );
+    assertThat(outContent.toString(), containsString("LDAP authentication success"));
+    assertThat(outContent.toString(), not(containsString("does not belong to any groups")));
+
+    LOG_EXIT();
+  }
 
 
 }
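Note on the assertions in testLDAPAuth above: they match against text captured from standard output after each KnoxCLI.run(...) call. The capture setup is outside this hunk, so the following is only a minimal sketch of how such a buffer is typically wired up, assuming outContent is a ByteArrayOutputStream swapped in for System.out before each test (the class and hook names below are illustrative, not from the patch):

  import java.io.ByteArrayOutputStream;
  import java.io.PrintStream;
  import org.junit.Before;

  public class CliOutputCaptureSketch {
    // Hypothetical stand-in for the outContent buffer the assertions read from.
    private static final ByteArrayOutputStream outContent = new ByteArrayOutputStream();

    @Before
    public void captureStdOut() {
      // KnoxCLI.run(...) prints its results to System.out; redirecting it lets the
      // test assert with containsString(...) and call outContent.reset() between cases.
      System.setOut( new PrintStream( outContent ) );
    }
  }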


[5/5] knox git commit: KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Renabled remainder of func tests.

Posted by km...@apache.org.
KNOX-620: Jenkins Knox-master-verify failing since #725 due to JDK version issues. Renabled remainder of func tests.


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/56cedc0a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/56cedc0a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/56cedc0a

Branch: refs/heads/master
Commit: 56cedc0ae038b1447ae125b4b8d1094bd1db5551
Parents: 2bb8d1e
Author: Kevin Minder <ke...@hortonworks.com>
Authored: Mon Nov 9 16:10:36 2015 -0500
Committer: Kevin Minder <ke...@hortonworks.com>
Committed: Mon Nov 9 16:10:36 2015 -0500

----------------------------------------------------------------------
 .../hadoop/gateway/GatewayAdminFuncTest.java    |  336 ++--
 .../gateway/GatewayAdminTopologyFuncTest.java   | 1512 +++++++++---------
 .../hadoop/gateway/GatewayDeployFuncTest.java   |  576 +++----
 .../GatewayLdapDynamicGroupFuncTest.java        |  534 +++----
 .../gateway/GatewayLdapGroupFuncTest.java       |  536 +++----
 .../gateway/GatewayLocalServiceFuncTest.java    |  330 ++--
 .../apache/hadoop/gateway/Knox242FuncTest.java  |  520 +++---
 .../gateway/KnoxCliLdapFuncTestNegative.java    |  558 +++----
 .../gateway/KnoxCliLdapFuncTestPositive.java    |  564 +++----
 .../hadoop/gateway/KnoxCliSysBindTest.java      |  548 +++----
 .../gateway/OozieServiceDefinitionTest.java     |  322 ++--
 .../hadoop/gateway/WebHdfsHaFuncTest.java       |  862 +++++-----
 .../deploy/DeploymentFactoryFuncTest.java       |  918 +++++------
 13 files changed, 4058 insertions(+), 4058 deletions(-)
----------------------------------------------------------------------
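The re-enabled tests in the files listed above share one convention: the timeout constants are uncommented together with the tests so that JUnit can abort a hung functional test instead of stalling the build. A minimal, self-contained sketch of that convention follows; the class and test names are illustrative only, and the exact multipliers vary per file:

  import org.junit.Test;

  public class TimeoutConventionSketch {
    private static final long SHORT_TIMEOUT = 1000L;
    private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
    private static final long LONG_TIMEOUT = 5 * MEDIUM_TIMEOUT;

    @Test( timeout = MEDIUM_TIMEOUT )
    public void exampleFuncTest() {
      // JUnit fails the test if it does not finish within the given number of
      // milliseconds, so a wedged gateway or LDAP server cannot hang the verify job.
    }
  }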


http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
index 2198de2..91f2162 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
@@ -52,173 +52,173 @@ import static org.junit.Assert.assertThat;
 
 public class GatewayAdminFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//  private static final long LONG_TIMEOUT = 5 * MEDIUM_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayAdminFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminFuncTest.class );
-//
-//  //public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    TestUtils.LOG_ENTER();
-//    //appenders = NoOpAppender.setUp();
-//    setupLdap();
-//    setupGateway();
-//    TestUtils.LOG_EXIT();
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    TestUtils.LOG_ENTER();
-//    gateway.stop();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//    TestUtils.LOG_EXIT();
-//  }
-//
-//  public static void setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway() throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    GatewayTestConfig testConfig = new GatewayTestConfig();
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "test-cluster.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createTopology().toStream( stream );
-//    stream.close();
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/test-cluster";
-//  }
-//
-//  private static XMLTag createTopology() {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .addTag( "provider" )
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  //@Test
-//  public void waitForManualTesting() throws IOException {
-//    System.out.println( clusterUrl );
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = MEDIUM_TIMEOUT )
-//  public void testAdminService() throws ClassNotFoundException {
-//    TestUtils.LOG_ENTER();
-//
-//    String username = "guest";
-//    String password = "guest-password";
-//    String serviceUrl =  clusterUrl + "/api/v1/version";
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic( username, password )
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        //.body( is( "{\"hash\":\"unknown\",\"version\":\"unknown\"}" ) )
-//        .when().get( serviceUrl );
-//
-//    TestUtils.LOG_EXIT();
-//  }
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+  private static final long LONG_TIMEOUT = 5 * MEDIUM_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayAdminFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminFuncTest.class );
+
+  //public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    TestUtils.LOG_ENTER();
+    //appenders = NoOpAppender.setUp();
+    setupLdap();
+    setupGateway();
+    TestUtils.LOG_EXIT();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    TestUtils.LOG_ENTER();
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+    TestUtils.LOG_EXIT();
+  }
+
+  public static void setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "test-cluster.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createTopology().toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/test-cluster";
+  }
+
+  private static XMLTag createTopology() {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .addTag( "provider" )
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  //@Test
+  public void waitForManualTesting() throws IOException {
+    System.out.println( clusterUrl );
+    System.in.read();
+  }
+
+  @Test( timeout = MEDIUM_TIMEOUT )
+  public void testAdminService() throws ClassNotFoundException {
+    TestUtils.LOG_ENTER();
+
+    String username = "guest";
+    String password = "guest-password";
+    String serviceUrl =  clusterUrl + "/api/v1/version";
+    given()
+        //.log().all()
+        .auth().preemptive().basic( username, password )
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        //.body( is( "{\"hash\":\"unknown\",\"version\":\"unknown\"}" ) )
+        .when().get( serviceUrl );
+
+    TestUtils.LOG_EXIT();
+  }
 
 }
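testAdminService above exercises the admin API through the rest-assured given/expect/when idiom. For readers less familiar with that style, here is a minimal sketch of the same call pattern, assuming the com.jayway.restassured, javax.ws.rs and org.apache.http imports these test classes already rely on; the URL and credentials are placeholders, not values from the patch:

  import static com.jayway.restassured.RestAssured.given;

  import javax.ws.rs.core.MediaType;
  import org.apache.http.HttpStatus;

  public class RestAssuredSketch {
    public static void main( String[] args ) {
      // Placeholder URL and credentials for illustration only.
      String serviceUrl = "http://localhost:8443/gateway/test-cluster/api/v1/version";
      given()
          .auth().preemptive().basic( "guest", "guest-password" )
          .header( "Accept", MediaType.APPLICATION_JSON )
          .expect()
          .statusCode( HttpStatus.SC_OK )      // assert the HTTP status of the response
          .when().get( serviceUrl );           // issue the GET that triggers the checks
    }
  }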

http://git-wip-us.apache.org/repos/asf/knox/blob/56cedc0a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
index bbda477..ef18740 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
@@ -69,761 +69,761 @@ import static org.junit.Assert.fail;
 
 public class GatewayAdminTopologyFuncTest {
 
-//  private static final long SHORT_TIMEOUT = 1000L;
-//  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
-//  private static final long LONG_TIMEOUT = 5 * MEDIUM_TIMEOUT;
-//
-//  private static Class RESOURCE_BASE_CLASS = GatewayAdminTopologyFuncTest.class;
-//  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminTopologyFuncTest.class );
-//
-//  public static Enumeration<Appender> appenders;
-//  public static GatewayConfig config;
-//  public static GatewayServer gateway;
-//  public static String gatewayUrl;
-//  public static String clusterUrl;
-//  public static SimpleLdapDirectoryServer ldap;
-//  public static TcpTransport ldapTransport;
-//
-//  @BeforeClass
-//  public static void setupSuite() throws Exception {
-//    //appenders = NoOpAppender.setUp();
-//    setupLdap();
-//    setupGateway(new GatewayTestConfig());
-//  }
-//
-//  @AfterClass
-//  public static void cleanupSuite() throws Exception {
-//    gateway.stop();
-//    ldap.stop( true );
-//    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
-//    //NoOpAppender.tearDown( appenders );
-//  }
-//
-//  public static void setupLdap() throws Exception {
-//    URL usersUrl = getResourceUrl( "users.ldif" );
-//    int port = findFreePort();
-//    ldapTransport = new TcpTransport( port );
-//    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
-//    ldap.start();
-//    LOG.info( "LDAP port = " + ldapTransport.getPort() );
-//  }
-//
-//  public static void setupGateway(GatewayTestConfig testConfig) throws Exception {
-//
-//    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
-//    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
-//    gatewayDir.mkdirs();
-//
-//    config = testConfig;
-//    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
-//
-//    File topoDir = new File( testConfig.getGatewayTopologyDir() );
-//    topoDir.mkdirs();
-//
-//    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
-//    deployDir.mkdirs();
-//
-//    File descriptor = new File( topoDir, "admin.xml" );
-//    FileOutputStream stream = new FileOutputStream( descriptor );
-//    createKnoxTopology().toStream( stream );
-//    stream.close();
-//
-//    File descriptor2 = new File( topoDir, "test-cluster.xml" );
-//    FileOutputStream stream2 = new FileOutputStream( descriptor2 );
-//    createNormalTopology().toStream( stream2 );
-//    stream.close();
-//
-//
-//    DefaultGatewayServices srvcs = new DefaultGatewayServices();
-//    Map<String,String> options = new HashMap<String,String>();
-//    options.put( "persist-master", "false" );
-//    options.put( "master", "password" );
-//
-//    try {
-//      srvcs.init( testConfig, options );
-//    } catch ( ServiceLifecycleException e ) {
-//      e.printStackTrace(); // I18N not required.
-//    }
-//    gateway = GatewayServer.startGateway( testConfig, srvcs );
-//    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
-//
-//    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
-//
-//    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
-//    clusterUrl = gatewayUrl + "/admin";
-//  }
-//
-//  private static XMLTag createNormalTopology() {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "webappsec" )
-//        .addTag( "name" ).addText( "WebAppSec" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "csrf.enabled" )
-//        .addTag( "value" ).addText( "true" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authorization" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "AclsAuthz" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "webhdfs-acl" )
-//        .addTag( "value" ).addText( "hdfs;*;*" ).gotoParent()
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "WEBHDFS" )
-//        .addTag( "url" ).addText( "http://localhost:50070/webhdfs/v1" ).gotoParent()
-//        .gotoRoot();
-////     System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static XMLTag createKnoxTopology() {
-//    XMLTag xml = XMLDoc.newDocument( true )
-//        .addRoot( "topology" )
-//        .addTag( "gateway" )
-//        .addTag( "provider" )
-//        .addTag( "role" ).addText( "authentication" )
-//        .addTag( "name" ).addText( "ShiroProvider" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm" )
-//        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
-//        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
-//        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
-//        .addTag( "value" ).addText( "simple" ).gotoParent()
-//        .addTag( "param" )
-//        .addTag( "name" ).addText( "urls./**" )
-//        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
-//        .addTag("provider")
-//        .addTag( "role" ).addText( "authorization" )
-//        .addTag( "name" ).addText( "AclsAuthz" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag("param")
-//        .addTag("name").addText("knox.acl")
-//        .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
-//        .addTag("provider")
-//        .addTag( "role" ).addText( "identity-assertion" )
-//        .addTag( "enabled" ).addText( "true" )
-//        .addTag( "name" ).addText( "Default" ).gotoParent()
-//        .addTag( "provider" )
-//        .gotoRoot()
-//        .addTag( "service" )
-//        .addTag( "role" ).addText( "KNOX" )
-//        .gotoRoot();
-//    // System.out.println( "GATEWAY=" + xml.toString() );
-//    return xml;
-//  }
-//
-//  private static int findFreePort() throws IOException {
-//    ServerSocket socket = new ServerSocket(0);
-//    int port = socket.getLocalPort();
-//    socket.close();
-//    return port;
-//  }
-//
-//  public static InputStream getResourceStream( String resource ) throws IOException {
-//    return getResourceUrl( resource ).openStream();
-//  }
-//
-//  public static URL getResourceUrl( String resource ) {
-//    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
-//    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
-//    return url;
-//  }
-//
-//  public static String getResourceName( String resource ) {
-//    return getResourceBaseName() + resource;
-//  }
-//
-//  public static String getResourceBaseName() {
-//    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
-//  }
-//
-//  //@Test
-//  public void waitForManualTesting() throws IOException {
-//    System.in.read();
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testTopologyCollection() throws ClassNotFoundException {
-//    LOG_ENTER();
-//
-//    String username = "admin";
-//    String password = "admin-password";
-//    String serviceUrl =  clusterUrl + "/api/v1/topologies";
-//    String href1 = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .contentType(MediaType.APPLICATION_JSON)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body("topologies.topology[0].name", not(nullValue()))
-//        .body("topologies.topology[1].name", not(nullValue()))
-//        .body("topologies.topology[0].uri", not(nullValue()))
-//        .body("topologies.topology[1].uri", not(nullValue()))
-//        .body("topologies.topology[0].href", not(nullValue()))
-//        .body("topologies.topology[1].href", not(nullValue()))
-//        .body("topologies.topology[0].timestamp", not(nullValue()))
-//        .body("topologies.topology[1].timestamp", not(nullValue()))
-//        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology.href[1]");
-//
-//       given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .expect()
-//        //.log().all()
-//        .body("topologies.topology.href[1]", equalTo(href1))
-//        .statusCode(HttpStatus.SC_OK)
-//        .when().get(serviceUrl);
-//
-//
-//
-//
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType(MediaType.APPLICATION_XML)
-//        .get(serviceUrl);
-//
-//
-//    given().auth().preemptive().basic(username, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType("application/json")
-//        .body("topology.name", equalTo("test-cluster"))
-//        .when().get(href1);
-//
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testTopologyObject() throws ClassNotFoundException {
-//    LOG_ENTER();
-//
-//    String username = "admin";
-//    String password = "admin-password";
-//    String serviceUrl =  clusterUrl + "/api/v1/topologies";
-//    String hrefJson = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
-//
-//    String timestampJson = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType("application/json")
-//        .when().get(serviceUrl).andReturn()
-//        .getBody().path("topologies.topology[1].timestamp");
-//
-//        given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body("topology.name", equalTo("test-cluster"))
-//        .body("topology.timestamp", equalTo(Long.parseLong(timestampJson)))
-//        .when()
-//        .get(hrefJson);
-//
-//
-//    String hrefXml = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .when()
-//        .get(hrefXml);
-//
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testPositiveAuthorization() throws ClassNotFoundException{
-//    LOG_ENTER();
-//
-//    String adminUser = "admin";
-//    String adminPass = "admin-password";
-//    String url =  clusterUrl + "/api/v1/topologies";
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(adminUser, adminPass)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType(ContentType.JSON)
-//        .body("topologies.topology[0].name", not(nullValue()))
-//        .body("topologies.topology[1].name", not(nullValue()))
-//        .body("topologies.topology[0].uri", not(nullValue()))
-//        .body("topologies.topology[1].uri", not(nullValue()))
-//        .body("topologies.topology[0].href", not(nullValue()))
-//        .body("topologies.topology[1].href", not(nullValue()))
-//        .body("topologies.topology[0].timestamp", not(nullValue()))
-//        .body("topologies.topology[1].timestamp", not(nullValue()))
-//        .get(url);
-//
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testNegativeAuthorization() throws ClassNotFoundException{
-//    LOG_ENTER();
-//
-//    String guestUser = "guest";
-//    String guestPass = "guest-password";
-//    String url =  clusterUrl + "/api/v1/topologies";
-//
-//    given()
-//        //.log().all()
-//        .auth().basic(guestUser, guestPass)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_FORBIDDEN)
-//        .get(url);
-//
-//
-//
-//  }
-//
-//  private Topology createTestTopology(){
-//    Topology topology = new Topology();
-//    topology.setName("test-topology");
-//
-//    try {
-//      topology.setUri(new URI(gatewayUrl + "/" + topology.getName()));
-//    } catch (URISyntaxException ex) {
-//      assertThat(topology.getUri(), not(nullValue()));
-//    }
-//
-//    Provider identityProvider = new Provider();
-//    identityProvider.setName("Default");
-//    identityProvider.setRole("identity-assertion");
-//    identityProvider.setEnabled(true);
-//
-//    Provider AuthenicationProvider = new Provider();
-//    AuthenicationProvider.setName("ShiroProvider");
-//    AuthenicationProvider.setRole("authentication");
-//    AuthenicationProvider.setEnabled(true);
-//
-//    Param ldapMain = new Param();
-//    ldapMain.setName("main.ldapRealm");
-//    ldapMain.setValue("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm");
-//
-//    Param ldapGroupContextFactory = new Param();
-//    ldapGroupContextFactory.setName("main.ldapGroupContextFactory");
-//    ldapGroupContextFactory.setValue("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory");
-//
-//    Param ldapRealmContext = new Param();
-//    ldapRealmContext.setName("main.ldapRealm.contextFactory");
-//    ldapRealmContext.setValue("$ldapGroupContextFactory");
-//
-//    Param ldapURL = new Param();
-//    ldapURL.setName("main.ldapRealm.contextFactory.url");
-//    ldapURL.setValue("ldap://localhost:" + ldapTransport.getPort());
-//
-//    Param ldapUserTemplate = new Param();
-//    ldapUserTemplate.setName("main.ldapRealm.userDnTemplate");
-//    ldapUserTemplate.setValue("uid={0},ou=people,dc=hadoop,dc=apache,dc=org");
-//
-//    Param authcBasic = new Param();
-//    authcBasic.setName("urls./**");
-//    authcBasic.setValue("authcBasic");
-//
-//    AuthenicationProvider.addParam(ldapGroupContextFactory);
-//    AuthenicationProvider.addParam(ldapMain);
-//    AuthenicationProvider.addParam(ldapRealmContext);
-//    AuthenicationProvider.addParam(ldapURL);
-//    AuthenicationProvider.addParam(ldapUserTemplate);
-//    AuthenicationProvider.addParam(authcBasic);
-//
-//    Service testService = new Service();
-//    testService.setRole("test-service-role");
-//
-//    topology.addProvider(AuthenicationProvider);
-//    topology.addProvider(identityProvider);
-//    topology.addService(testService);
-//    topology.setTimestamp(System.nanoTime());
-//
-//    return topology;
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testDeployTopology() throws ClassNotFoundException {
-//    LOG_ENTER();
-//
-//    Topology testTopology = createTestTopology();
-//
-//    String user = "guest";
-//    String password = "guest-password";
-//
-//    String url = gatewayUrl + "/" + testTopology.getName() + "/test-service-path/test-service-resource";
-//
-//    GatewayServices srvs = GatewayServer.getGatewayServices();
-//
-//    TopologyService ts = srvs.getService(GatewayServices.TOPOLOGY_SERVICE);
-//
-//    assertThat(testTopology, not(nullValue()));
-//    assertThat(testTopology.getName(), is("test-topology"));
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(user, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_NOT_FOUND)
-//        .when()
-//        .get(url);
-//
-//    ts.deployTopology(testTopology);
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(user, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType("text/plain")
-//        .body(is("test-service-response"))
-//        .when()
-//        .get(url).getBody();
-//
-//    ts.deleteTopology(testTopology);
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(user, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_NOT_FOUND)
-//        .when()
-//        .get(url);
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testDeleteTopology() throws ClassNotFoundException {
-//    LOG_ENTER();
-//
-//    Topology test = createTestTopology();
-//
-//    String username = "admin";
-//    String password = "admin-password";
-//    String url =  clusterUrl + "/api/v1/topologies/" + test.getName();
-//
-//    GatewayServices gs = GatewayServer.getGatewayServices();
-//
-//    TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
-//
-//    ts.deployTopology(test);
-//
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType(MediaType.APPLICATION_JSON)
-//        .get(url);
-//
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_OK)
-//        .contentType(MediaType.APPLICATION_JSON)
-//        .delete(url);
-//
-//    given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .expect()
-//        //.log().all()
-//        .statusCode(HttpStatus.SC_NO_CONTENT)
-//        .get(url);
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testPutTopology() throws ClassNotFoundException {
-//    LOG_ENTER() ;
-//
-//    String username = "admin";
-//    String password = "admin-password";
-//    String url =  clusterUrl + "/api/v1/topologies/test-put";
-//
-//    String JsonPut =
-//        given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .get(clusterUrl + "/api/v1/topologies/test-cluster")
-//        .getBody().asString();
-//
-//    String XML = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .contentType(MediaType.APPLICATION_JSON)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .body(JsonPut)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        //.log().all()
-//        .put(url).getBody().asString();
-//
-//
-//        given()
-//            .auth().preemptive().basic(username, password)
-//            .header("Accept", MediaType.APPLICATION_XML)
-//            .expect()
-//            .statusCode(HttpStatus.SC_OK)
-//            .body(equalTo(XML))
-//            .get(url)
-//            .getBody().asString();
-//
-//
-//    String XmlPut =
-//        given()
-//            .auth().preemptive().basic(username, password)
-//            .header("Accept", MediaType.APPLICATION_XML)
-//            .get(clusterUrl + "/api/v1/topologies/test-cluster")
-//            .getBody().asString();
-//
-//    String JSON = given()
-//        //.log().all()
-//        .auth().preemptive().basic(username, password)
-//        .contentType(MediaType.APPLICATION_XML)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .body(XmlPut)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//            //.log().all()
-//        .put(url).getBody().asString();
-//
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_JSON)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(equalTo(JSON))
-//        .get(url)
-//        .getBody().asString();
-//
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testXForwardedHeaders() {
-//    LOG_ENTER();
-//
-//    String username = "admin";
-//    String password = "admin-password";
-//    String url =  clusterUrl + "/api/v1/topologies";
-//
-////    X-Forward header values
-//    String port = String.valueOf(777);
-//    String server = "myserver";
-//    String host = server + ":" + port;
-//    String proto = "protocol";
-//    String context = "/mycontext";
-//    String newUrl = proto + "://" + host + context;
-////    String port = String.valueOf(gateway.getAddresses()[0].getPort());
-//
-////     Case 1: Add in all x-forward headers (host, port, server, context, proto)
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .header("X-Forwarded-Host", host )
-//        .header("X-Forwarded-Port", port )
-//        .header("X-Forwarded-Server", server )
-//        .header("X-Forwarded-Context", context)
-//        .header("X-Forwarded-Proto", proto)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(newUrl))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url).prettyPrint();
-//
-//
-////     Case 2: add in x-forward headers (host, server, proto, context)
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .header("X-Forwarded-Host", host )
-//        .header("X-Forwarded-Server", server )
-//        .header("X-Forwarded-Context", context )
-//        .header("X-Forwarded-Proto", proto )
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(server))
-//        .body(containsString(context))
-//        .body(containsString(proto))
-//        .body(containsString(host))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url);
-//
-////     Case 3: add in x-forward headers (host, proto, port, context)
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .header("X-Forwarded-Host", host )
-//        .header("X-Forwarded-Port", port )
-//        .header("X-Forwarded-Context", context )
-//        .header("X-Forwarded-Proto", proto)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(host))
-//        .body(containsString(port))
-//        .body(containsString(context))
-//        .body(containsString(proto))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url);
-//
-////     Case 4: add in x-forward headers (host, proto, port, context) no port in host.
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .header("X-Forwarded-Host", server)
-//        .header("X-Forwarded-Port", port)
-//        .header("X-Forwarded-Context", context)
-//        .header("X-Forwarded-Proto", proto)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(server))
-//        .body(containsString(port))
-//        .body(containsString(context))
-//        .body(containsString(proto))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url);
-//
-////     Case 5: add in x-forward headers (host, port)
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .header("X-Forwarded-Host", host )
-//        .header("X-Forwarded-Port", port )
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(host))
-//        .body(containsString(port))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url);
-//
-////     Case 6: Normal Request
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(url))
-//        .body(containsString("test-cluster"))
-//        .body(containsString("admin"))
-//        .get(url);
-//
-//  }
-//
-//  @Test( timeout = LONG_TIMEOUT )
-//  public void testGatewayPathChange() throws Exception {
-//    LOG_ENTER();
-//    String username = "admin";
-//    String password = "admin-password";
-//    String url =  clusterUrl + "/api/v1/topologies";
-//
-////     Case 1: Normal Request (No Change in gateway.path). Ensure HTTP OK resp + valid URL.
-//    given()
-//        .auth().preemptive().basic(username, password)
-//        .header("Accept", MediaType.APPLICATION_XML)
-//        .expect()
-//        .statusCode(HttpStatus.SC_OK)
-//        .body(containsString(url + "/test-cluster"))
-//        .get(url);
-//
-//
-////     Case 2: Change gateway.path to another String. Ensure HTTP OK resp + valid URL.
-//   try {
-//     gateway.stop();
-//
-//     GatewayTestConfig conf = new GatewayTestConfig();
-//     conf.setGatewayPath("new-gateway-path");
-//     setupGateway(conf);
-//
-//     String newUrl = clusterUrl + "/api/v1/topologies";
-//
-//     given()
-//         .auth().preemptive().basic(username, password)
-//         .header("Accept", MediaType.APPLICATION_XML)
-//         .expect()
-//         .statusCode(HttpStatus.SC_OK)
-//         .body(containsString(newUrl + "/test-cluster"))
-//         .get(newUrl);
-//   } catch(Exception e){
-//     fail(e.getMessage());
-//   }
-//    finally {
-////        Restart the gateway with old settings.
-//       gateway.stop();
-//      setupGateway(new GatewayTestConfig());
-//    }
-//
-//  }
-//
-//  private static final String CLASS = GatewayAdminTopologyFuncTest.class.getCanonicalName();
+  private static final long SHORT_TIMEOUT = 1000L;
+  private static final long MEDIUM_TIMEOUT = 5 * SHORT_TIMEOUT;
+  private static final long LONG_TIMEOUT = 5 * MEDIUM_TIMEOUT;
+
+  private static Class RESOURCE_BASE_CLASS = GatewayAdminTopologyFuncTest.class;
+  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminTopologyFuncTest.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    //appenders = NoOpAppender.setUp();
+    setupLdap();
+    setupGateway(new GatewayTestConfig());
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    gateway.stop();
+    ldap.stop( true );
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+  }
+
+  public static void setupLdap() throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway(GatewayTestConfig testConfig) throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, "admin.xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    createKnoxTopology().toStream( stream );
+    stream.close();
+
+    File descriptor2 = new File( topoDir, "test-cluster.xml" );
+    FileOutputStream stream2 = new FileOutputStream( descriptor2 );
+    createNormalTopology().toStream( stream2 );
+    stream2.close();
+
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+    gateway = GatewayServer.startGateway( testConfig, srvcs );
+    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
+
+    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+
+    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
+    clusterUrl = gatewayUrl + "/admin";
+  }
+
+  private static XMLTag createNormalTopology() {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "webappsec" )
+        .addTag( "name" ).addText( "WebAppSec" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "csrf.enabled" )
+        .addTag( "value" ).addText( "true" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "AclsAuthz" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "webhdfs-acl" )
+        .addTag( "value" ).addText( "hdfs;*;*" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "WEBHDFS" )
+        .addTag( "url" ).addText( "http://localhost:50070/webhdfs/v1" ).gotoParent()
+        .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createKnoxTopology() {
+    XMLTag xml = XMLDoc.newDocument( true )
+        .addRoot( "topology" )
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "authentication" )
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm" )
+        .addTag( "value" ).addText( "org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
+        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
+        .addTag( "value" ).addText( "ldap://localhost:" + ldapTransport.getPort() ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
+        .addTag( "value" ).addText( "simple" ).gotoParent()
+        .addTag( "param" )
+        .addTag( "name" ).addText( "urls./**" )
+        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "name" ).addText( "AclsAuthz" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag("param")
+        .addTag("name").addText("knox.acl")
+        .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .addTag( "provider" )
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  //@Test
+  public void waitForManualTesting() throws IOException {
+    System.in.read();
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testTopologyCollection() throws ClassNotFoundException {
+    LOG_ENTER();
+
+    String username = "admin";
+    String password = "admin-password";
+    String serviceUrl =  clusterUrl + "/api/v1/topologies";
+    String href1 = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .contentType(MediaType.APPLICATION_JSON)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .body("topologies.topology[0].name", not(nullValue()))
+        .body("topologies.topology[1].name", not(nullValue()))
+        .body("topologies.topology[0].uri", not(nullValue()))
+        .body("topologies.topology[1].uri", not(nullValue()))
+        .body("topologies.topology[0].href", not(nullValue()))
+        .body("topologies.topology[1].href", not(nullValue()))
+        .body("topologies.topology[0].timestamp", not(nullValue()))
+        .body("topologies.topology[1].timestamp", not(nullValue()))
+        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology.href[1]");
+
+       given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .expect()
+        //.log().all()
+        .body("topologies.topology.href[1]", equalTo(href1))
+        .statusCode(HttpStatus.SC_OK)
+        .when().get(serviceUrl);
+
+    given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType(MediaType.APPLICATION_XML)
+        .get(serviceUrl);
+
+
+    given().auth().preemptive().basic(username, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType("application/json")
+        .body("topology.name", equalTo("test-cluster"))
+        .when().get(href1);
+
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testTopologyObject() throws ClassNotFoundException {
+    LOG_ENTER();
+
+    String username = "admin";
+    String password = "admin-password";
+    String serviceUrl =  clusterUrl + "/api/v1/topologies";
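+    // Look up the second topology's href and timestamp from the JSON listing, then verify
+    // the detail resource in both its JSON and XML representations.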
+    String hrefJson = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
+
+    String timestampJson = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType("application/json")
+        .when().get(serviceUrl).andReturn()
+        .getBody().path("topologies.topology[1].timestamp");
+
+        given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .body("topology.name", equalTo("test-cluster"))
+        .body("topology.timestamp", equalTo(Long.parseLong(timestampJson)))
+        .when()
+        .get(hrefJson);
+
+
+    String hrefXml = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
+
+    given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .when()
+        .get(hrefXml);
+
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testPositiveAuthorization() throws ClassNotFoundException{
+    LOG_ENTER();
+
+    String adminUser = "admin";
+    String adminPass = "admin-password";
+    String url =  clusterUrl + "/api/v1/topologies";
+
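+    // The admin user matches the knox.acl rule (admin;*;*), so the listing is returned.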
+    given()
+        //.log().all()
+        .auth().preemptive().basic(adminUser, adminPass)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType(ContentType.JSON)
+        .body("topologies.topology[0].name", not(nullValue()))
+        .body("topologies.topology[1].name", not(nullValue()))
+        .body("topologies.topology[0].uri", not(nullValue()))
+        .body("topologies.topology[1].uri", not(nullValue()))
+        .body("topologies.topology[0].href", not(nullValue()))
+        .body("topologies.topology[1].href", not(nullValue()))
+        .body("topologies.topology[0].timestamp", not(nullValue()))
+        .body("topologies.topology[1].timestamp", not(nullValue()))
+        .get(url);
+
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testNegativeAuthorization() throws ClassNotFoundException{
+    LOG_ENTER();
+
+    String guestUser = "guest";
+    String guestPass = "guest-password";
+    String url =  clusterUrl + "/api/v1/topologies";
+
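+    // The guest user is not covered by the knox.acl rule (admin;*;*), so access is denied.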
+    given()
+        //.log().all()
+        .auth().basic(guestUser, guestPass)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_FORBIDDEN)
+        .get(url);
+
+  }
+
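+  // Builds a minimal topology (ShiroProvider LDAP authentication, Default identity-assertion
+  // and a test-service-role service) used by the deploy/delete tests below.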
+  private Topology createTestTopology(){
+    Topology topology = new Topology();
+    topology.setName("test-topology");
+
+    try {
+      topology.setUri(new URI(gatewayUrl + "/" + topology.getName()));
+    } catch (URISyntaxException ex) {
+      // An invalid URI leaves topology.getUri() null, so this assertion fails the test.
+      assertThat(topology.getUri(), not(nullValue()));
+    }
+
+    Provider identityProvider = new Provider();
+    identityProvider.setName("Default");
+    identityProvider.setRole("identity-assertion");
+    identityProvider.setEnabled(true);
+
+    Provider authenticationProvider = new Provider();
+    authenticationProvider.setName("ShiroProvider");
+    authenticationProvider.setRole("authentication");
+    authenticationProvider.setEnabled(true);
+
+    Param ldapMain = new Param();
+    ldapMain.setName("main.ldapRealm");
+    ldapMain.setValue("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm");
+
+    Param ldapGroupContextFactory = new Param();
+    ldapGroupContextFactory.setName("main.ldapGroupContextFactory");
+    ldapGroupContextFactory.setValue("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory");
+
+    Param ldapRealmContext = new Param();
+    ldapRealmContext.setName("main.ldapRealm.contextFactory");
+    ldapRealmContext.setValue("$ldapGroupContextFactory");
+
+    Param ldapURL = new Param();
+    ldapURL.setName("main.ldapRealm.contextFactory.url");
+    ldapURL.setValue("ldap://localhost:" + ldapTransport.getPort());
+
+    Param ldapUserTemplate = new Param();
+    ldapUserTemplate.setName("main.ldapRealm.userDnTemplate");
+    ldapUserTemplate.setValue("uid={0},ou=people,dc=hadoop,dc=apache,dc=org");
+
+    Param authcBasic = new Param();
+    authcBasic.setName("urls./**");
+    authcBasic.setValue("authcBasic");
+
+    authenticationProvider.addParam(ldapGroupContextFactory);
+    authenticationProvider.addParam(ldapMain);
+    authenticationProvider.addParam(ldapRealmContext);
+    authenticationProvider.addParam(ldapURL);
+    authenticationProvider.addParam(ldapUserTemplate);
+    authenticationProvider.addParam(authcBasic);
+
+    Service testService = new Service();
+    testService.setRole("test-service-role");
+
+    topology.addProvider(authenticationProvider);
+    topology.addProvider(identityProvider);
+    topology.addService(testService);
+    topology.setTimestamp(System.nanoTime());
+
+    return topology;
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testDeployTopology() throws ClassNotFoundException {
+    LOG_ENTER();
+
+    Topology testTopology = createTestTopology();
+
+    String user = "guest";
+    String password = "guest-password";
+
+    String url = gatewayUrl + "/" + testTopology.getName() + "/test-service-path/test-service-resource";
+
+    GatewayServices srvs = GatewayServer.getGatewayServices();
+
+    TopologyService ts = srvs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+    assertThat(testTopology, not(nullValue()));
+    assertThat(testTopology.getName(), is("test-topology"));
+
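+    // The service URL should return 404 before deployment, 200 while deployed, and 404 again after deletion.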
+    given()
+        //.log().all()
+        .auth().preemptive().basic(user, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_NOT_FOUND)
+        .when()
+        .get(url);
+
+    ts.deployTopology(testTopology);
+
+    given()
+        //.log().all()
+        .auth().preemptive().basic(user, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType("text/plain")
+        .body(is("test-service-response"))
+        .when()
+        .get(url).getBody();
+
+    ts.deleteTopology(testTopology);
+
+    given()
+        //.log().all()
+        .auth().preemptive().basic(user, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_NOT_FOUND)
+        .when()
+        .get(url);
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testDeleteTopology() throws ClassNotFoundException {
+    LOG_ENTER();
+
+    Topology test = createTestTopology();
+
+    String username = "admin";
+    String password = "admin-password";
+    String url =  clusterUrl + "/api/v1/topologies/" + test.getName();
+
+    GatewayServices gs = GatewayServer.getGatewayServices();
+
+    TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
+
+    ts.deployTopology(test);
+
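+    // The deployed topology should be readable and deletable through the Admin API,
+    // and a subsequent read should return no content.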
+    given()
+        .auth().preemptive().basic(username, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType(MediaType.APPLICATION_JSON)
+        .get(url);
+
+    given()
+        .auth().preemptive().basic(username, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_OK)
+        .contentType(MediaType.APPLICATION_JSON)
+        .delete(url);
+
+    given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .expect()
+        //.log().all()
+        .statusCode(HttpStatus.SC_NO_CONTENT)
+        .get(url);
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testPutTopology() throws ClassNotFoundException {
+    LOG_ENTER();
+
+    String username = "admin";
+    String password = "admin-password";
+    String url =  clusterUrl + "/api/v1/topologies/test-put";
+
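+    // Round-trip the test-cluster topology: GET it as JSON, PUT it under a new name and read it
+    // back as XML, then repeat the exchange starting from the XML representation.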
+    String JsonPut =
+        given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .get(clusterUrl + "/api/v1/topologies/test-cluster")
+        .getBody().asString();
+
+    String XML = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .contentType(MediaType.APPLICATION_JSON)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .body(JsonPut)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        //.log().all()
+        .put(url).getBody().asString();
+
+
+        given()
+            .auth().preemptive().basic(username, password)
+            .header("Accept", MediaType.APPLICATION_XML)
+            .expect()
+            .statusCode(HttpStatus.SC_OK)
+            .body(equalTo(XML))
+            .get(url)
+            .getBody().asString();
+
+
+    String XmlPut =
+        given()
+            .auth().preemptive().basic(username, password)
+            .header("Accept", MediaType.APPLICATION_XML)
+            .get(clusterUrl + "/api/v1/topologies/test-cluster")
+            .getBody().asString();
+
+    String JSON = given()
+        //.log().all()
+        .auth().preemptive().basic(username, password)
+        .contentType(MediaType.APPLICATION_XML)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .body(XmlPut)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+            //.log().all()
+        .put(url).getBody().asString();
+
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(equalTo(JSON))
+        .get(url)
+        .getBody().asString();
+
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testXForwardedHeaders() {
+    LOG_ENTER();
+
+    String username = "admin";
+    String password = "admin-password";
+    String url =  clusterUrl + "/api/v1/topologies";
+
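+    // The hrefs returned by the Admin API should be rewritten from the X-Forwarded-* request headers.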
+//    X-Forward header values
+    String port = String.valueOf(777);
+    String server = "myserver";
+    String host = server + ":" + port;
+    String proto = "protocol";
+    String context = "/mycontext";
+    String newUrl = proto + "://" + host + context;
+//    String port = String.valueOf(gateway.getAddresses()[0].getPort());
+
+//     Case 1: Add in all x-forward headers (host, port, server, context, proto)
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .header("X-Forwarded-Host", host )
+        .header("X-Forwarded-Port", port )
+        .header("X-Forwarded-Server", server )
+        .header("X-Forwarded-Context", context)
+        .header("X-Forwarded-Proto", proto)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(newUrl))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url).prettyPrint();
+
+
+//     Case 2: add in x-forward headers (host, server, proto, context)
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .header("X-Forwarded-Host", host )
+        .header("X-Forwarded-Server", server )
+        .header("X-Forwarded-Context", context )
+        .header("X-Forwarded-Proto", proto )
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(server))
+        .body(containsString(context))
+        .body(containsString(proto))
+        .body(containsString(host))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url);
+
+//     Case 3: add in x-forward headers (host, proto, port, context)
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .header("X-Forwarded-Host", host )
+        .header("X-Forwarded-Port", port )
+        .header("X-Forwarded-Context", context )
+        .header("X-Forwarded-Proto", proto)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(host))
+        .body(containsString(port))
+        .body(containsString(context))
+        .body(containsString(proto))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url);
+
+//     Case 4: add in x-forward headers (host, proto, port, context) no port in host.
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .header("X-Forwarded-Host", server)
+        .header("X-Forwarded-Port", port)
+        .header("X-Forwarded-Context", context)
+        .header("X-Forwarded-Proto", proto)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(server))
+        .body(containsString(port))
+        .body(containsString(context))
+        .body(containsString(proto))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url);
+
+//     Case 5: add in x-forward headers (host, port)
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .header("X-Forwarded-Host", host )
+        .header("X-Forwarded-Port", port )
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(host))
+        .body(containsString(port))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url);
+
+//     Case 6: Normal Request
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(url))
+        .body(containsString("test-cluster"))
+        .body(containsString("admin"))
+        .get(url);
+
+  }
+
+  @Test( timeout = LONG_TIMEOUT )
+  public void testGatewayPathChange() throws Exception {
+    LOG_ENTER();
+    String username = "admin";
+    String password = "admin-password";
+    String url =  clusterUrl + "/api/v1/topologies";
+
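+    // Topology hrefs should reflect the configured gateway.path, both for the default path
+    // and after redeploying the gateway with a new one.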
+//     Case 1: Normal Request (No Change in gateway.path). Ensure HTTP OK resp + valid URL.
+    given()
+        .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_XML)
+        .expect()
+        .statusCode(HttpStatus.SC_OK)
+        .body(containsString(url + "/test-cluster"))
+        .get(url);
+
+
+//     Case 2: Change gateway.path to another String. Ensure HTTP OK resp + valid URL.
+    try {
+      gateway.stop();
+
+      GatewayTestConfig conf = new GatewayTestConfig();
+      conf.setGatewayPath("new-gateway-path");
+      setupGateway(conf);
+
+      String newUrl = clusterUrl + "/api/v1/topologies";
+
+      given()
+          .auth().preemptive().basic(username, password)
+          .header("Accept", MediaType.APPLICATION_XML)
+          .expect()
+          .statusCode(HttpStatus.SC_OK)
+          .body(containsString(newUrl + "/test-cluster"))
+          .get(newUrl);
+    } catch (Exception e) {
+      fail(e.getMessage());
+    } finally {
+      // Restart the gateway with old settings.
+      gateway.stop();
+      setupGateway(new GatewayTestConfig());
+    }
+
+  }
+
+  private static final String CLASS = GatewayAdminTopologyFuncTest.class.getCanonicalName();
 
 }