You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by su...@apache.org on 2017/01/13 21:10:10 UTC

knox git commit: KNOX-845 Added webhdfs groovy shell tests using MiniDFSCluster

Repository: knox
Updated Branches:
  refs/heads/master 869ced5de -> 082455c35


KNOX-845 Added webhdfs groovy shell tests using MiniDFSCluster


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/082455c3
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/082455c3
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/082455c3

Branch: refs/heads/master
Commit: 082455c358f81f62dd48152be8df0ebdcf4b768a
Parents: 869ced5
Author: Sumit Gupta <su...@apache.org>
Authored: Fri Jan 13 16:08:51 2017 -0500
Committer: Sumit Gupta <su...@apache.org>
Committed: Fri Jan 13 16:08:51 2017 -0500

----------------------------------------------------------------------
 gateway-test-release/pom.xml                    |   3 +-
 .../hadoop/gateway/SecureClusterTest.java       |   5 +-
 gateway-test-release/webhdfs-test/pom.xml       |  53 +++
 .../hadoop/gateway/GatewayTestConfig.java       | 457 +++++++++++++++++++
 .../hadoop/gateway/GatewayTestDriver.java       | 183 ++++++++
 .../org/apache/hadoop/gateway/ShellTest.java    | 198 ++++++++
 .../ShellTest/InsecureWebHdfsPutGet.groovy      |  39 ++
 .../org/apache/hadoop/gateway/ShellTest/README  |  57 +++
 .../gateway/ShellTest/WebHdfsPutGet.groovy      |  40 ++
 .../apache/hadoop/gateway/ShellTest/users.ldif  |  61 +++
 10 files changed, 1092 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-release/pom.xml b/gateway-test-release/pom.xml
index 7aa27fb..00b5818 100644
--- a/gateway-test-release/pom.xml
+++ b/gateway-test-release/pom.xml
@@ -31,6 +31,7 @@
     <packaging>pom</packaging>
     <modules>
         <module>webhdfs-kerb-test</module>
+        <module>webhdfs-test</module>
     </modules>
 
     <dependencies>
@@ -118,7 +119,7 @@
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
-            <version>4.3.6</version>
+            <version>4.5.1</version>
         </dependency>
         <dependency>
             <groupId>log4j</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
index ea39c25..33e5589 100644
--- a/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
+++ b/gateway-test-release/webhdfs-kerb-test/src/test/java/org/apache/hadoop/gateway/SecureClusterTest.java
@@ -73,7 +73,6 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.IPC_CLIENT_CONNECT_MAX_RETRIE
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
-
 @Category(ReleaseTest.class)
 public class SecureClusterTest {
 
@@ -189,7 +188,7 @@ public class SecureClusterTest {
     CloseableHttpResponse response = client.execute(target, request);
     String json = EntityUtils.toString(response.getEntity());
     response.close();
-    System.out.println(json);
+//    System.out.println(json);
     assertEquals("{\"Path\":\"/user/" + userName + "\"}", json);
   }
 
@@ -257,7 +256,7 @@ public class SecureClusterTest {
         .addTag("enabled").addText("true")
         .addTag( "param" )
         .addTag("name").addText("csrf.enabled")
-        .addTag("value").addText("true").gotoParent().gotoParent()
+        .addTag("value").addText("false").gotoParent().gotoParent()
         .addTag("provider")
         .addTag("role").addText("authentication")
         .addTag("name").addText("ShiroProvider")

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/pom.xml b/gateway-test-release/webhdfs-test/pom.xml
new file mode 100644
index 0000000..47698e8
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/pom.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <artifactId>gateway-test-release</artifactId>
+        <groupId>org.apache.knox</groupId>
+        <version>0.12.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>webhdfs-test</artifactId>
+    <name>webhdfs-test</name>
+    <version>0.12.0-SNAPSHOT</version>
+    <description>Tests for WebHDFS integration with Knox</description>
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>${surefire-version}</version>
+                <configuration>
+                    <forkCount>1</forkCount>
+                    <reuseForks>false</reuseForks>
+                    <systemPropertyVariables>
+                        <gateway-version>${gateway-version}</gateway-version>
+                    </systemPropertyVariables>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-failsafe-plugin</artifactId>
+                <version>${failsafe-version}</version>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
new file mode 100644
index 0000000..5c633d0
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -0,0 +1,457 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+public class GatewayTestConfig extends Configuration implements GatewayConfig {
+
+  /* Websocket defaults */
+  public static final boolean DEFAULT_WEBSOCKET_FEATURE_ENABLED =  false;
+  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE =  Integer.MAX_VALUE;
+  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE =  Integer.MAX_VALUE;
+  public static final int DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE =  32768;
+  public static final int DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE =  32768;
+  public static final int DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE =  4096;
+  public static final int DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT =  60000;
+  public static final int DEFAULT_WEBSOCKET_IDLE_TIMEOUT =  300000;
+
+  private String gatewayHomeDir = "gateway-home";
+  private String hadoopConfDir = "hadoop";
+  private String gatewayHost = "localhost";
+  private int gatewayPort = 0;
+  private String gatewayPath = "gateway";
+  private boolean hadoopKerberosSecured = false;
+  private String kerberosConfig = "/etc/knox/conf/krb5.conf";
+  private boolean kerberosDebugEnabled = false;
+  private String kerberosLoginConfig = "/etc/knox/conf/krb5JAASLogin.conf";
+  private String frontendUrl = null;
+  private boolean xForwardedEnabled = true;
+
+  public void setGatewayHomeDir( String gatewayHomeDir ) {
+    this.gatewayHomeDir = gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewayConfDir() {
+    return gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewayDataDir() {
+    return gatewayHomeDir;
+  }
+
+  @Override
+  public String getGatewaySecurityDir() {
+    return gatewayHomeDir + "/security";
+  }
+
+  @Override
+  public String getGatewayTopologyDir() {
+    return gatewayHomeDir + "/topologies";
+  }
+
+  @Override
+  public String getGatewayDeploymentDir() {
+    return gatewayHomeDir + "/deployments";
+  }
+
+//  public void setDeploymentDir( String clusterConfDir ) {
+//    this.deployDir = clusterConfDir;
+//  }
+
+  @Override
+  public String getHadoopConfDir() {
+    return hadoopConfDir;
+  }
+
+//  public void setHadoopConfDir( String hadoopConfDir ) {
+//    this.hadoopConfDir = hadoopConfDir;
+//  }
+
+  @Override
+  public String getGatewayHost() {
+    return gatewayHost;
+  }
+
+//  public void setGatewayHost( String gatewayHost ) {
+//    this.gatewayHost = gatewayHost;
+//  }
+
+  @Override
+  public int getGatewayPort() {
+    return gatewayPort;
+  }
+
+//  public void setGatewayPort( int gatewayPort ) {
+//    this.gatewayPort = gatewayPort;
+//  }
+
+  @Override
+  public String getGatewayPath() {
+    return gatewayPath;
+  }
+
+  public void setGatewayPath( String gatewayPath ) {
+    this.gatewayPath = gatewayPath;
+  }
+
+  @Override
+  public InetSocketAddress getGatewayAddress() throws UnknownHostException {
+    return new InetSocketAddress( getGatewayHost(), getGatewayPort() );
+  }
+
+  @Override
+  public boolean isSSLEnabled() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  @Override
+  public boolean isHadoopKerberosSecured() {
+    return hadoopKerberosSecured;
+  }
+
+  public void setHadoopKerberosSecured(boolean hadoopKerberosSecured) {
+    this.hadoopKerberosSecured = hadoopKerberosSecured;
+  }
+  
+  @Override
+  public String getKerberosConfig() {
+    return kerberosConfig;
+  }
+  
+  public void setKerberosConfig(String kerberosConfig) {
+    this.kerberosConfig = kerberosConfig;
+  }
+
+
+  @Override
+  public boolean isKerberosDebugEnabled() {
+    return kerberosDebugEnabled;
+  }
+  
+//  public void setKerberosDebugEnabled(boolean kerberosDebugEnabled) {
+//    this.kerberosDebugEnabled = kerberosDebugEnabled;
+//  }
+  
+  @Override
+  public String getKerberosLoginConfig() {
+    return kerberosLoginConfig;
+  }
+
+  public void setKerberosLoginConfig(String kerberosLoginConfig) {
+    this.kerberosLoginConfig = kerberosLoginConfig;
+  }
+
+  /* (non-Javadoc)
+     * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultTopologyName()
+     */
+  @Override
+  public String getDefaultTopologyName() {
+    return "default";
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getDefaultAppRedirectPath()
+   */
+  @Override
+  public String getDefaultAppRedirectPath() {
+    // TODO Auto-generated method stub
+    return "/gateway/sandbox";
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getFrontendUrl()
+   */
+  @Override
+  public String getFrontendUrl() { return frontendUrl; }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getExcludedSSLProtocols()
+   */
+  @Override
+  public List getExcludedSSLProtocols() {
+    List<String> protocols = new ArrayList<String>();
+    protocols.add("SSLv3");
+    return protocols;
+  }
+
+  @Override
+  public List getIncludedSSLCiphers() {
+    return null;
+  }
+
+  @Override
+  public List getExcludedSSLCiphers() {
+    return null;
+  }
+
+  public void setFrontendUrl( String frontendUrl ) {
+    this.frontendUrl = frontendUrl;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#isClientAuthNeeded()
+   */
+  @Override
+  public boolean isClientAuthNeeded() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststorePath()
+   */
+  @Override
+  public String getTruststorePath() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTrustAllCerts()
+   */
+  @Override
+  public boolean getTrustAllCerts() {
+    // TODO Auto-generated method stub
+    return false;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getTruststoreType()
+   */
+  @Override
+  public String getTruststoreType() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+  
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getKeystoreType()
+   */
+  @Override
+  public String getKeystoreType() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+//  public void setKerberosLoginConfig(String kerberosLoginConfig) {
+//   this.kerberosLoginConfig = kerberosLoginConfig;
+//  }
+
+   @Override
+   public String getGatewayServicesDir() {
+      return gatewayHomeDir + "/data/services";
+   }
+
+  @Override
+  public String getGatewayApplicationsDir() {
+    return gatewayHomeDir + "/conf/applications";
+  }
+
+  @Override
+  public boolean isXForwardedEnabled() {
+    return xForwardedEnabled;
+  }
+
+  public void setXForwardedEnabled(boolean enabled) {
+    xForwardedEnabled = enabled;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getEphemeralDHKeySize()
+   */
+  @Override
+  public String getEphemeralDHKeySize() {
+    return "2048";
+  }
+
+  @Override
+  public int getHttpClientMaxConnections() {
+    return 16;
+  }
+
+  @Override
+  public int getHttpClientConnectionTimeout() {
+    return -1;
+  }
+
+  @Override
+  public int getHttpClientSocketTimeout() {
+    return -1;
+  }
+
+  @Override
+  public int getThreadPoolMax() {
+    return 16;
+  }
+
+  @Override
+  public int getHttpServerRequestBuffer() {
+    return 16*1024;
+  }
+
+  @Override
+  public int getHttpServerRequestHeaderBuffer() {
+    return 8*1024;
+  }
+
+  @Override
+  public int getHttpServerResponseBuffer() {
+    return 32*1024;
+  }
+
+  @Override
+  public int getHttpServerResponseHeaderBuffer() {
+    return 8*1024;
+  }
+
+  @Override
+  public int getGatewayDeploymentsBackupVersionLimit() {
+    return Integer.MAX_VALUE;
+  }
+
+  @Override
+  public long getGatewayDeploymentsBackupAgeLimit() {
+    return Long.MAX_VALUE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getSigningKeystoreName()
+   */
+  @Override
+  public String getSigningKeystoreName() {
+    return null;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getSigningKeyAlias()
+   */
+  @Override
+  public String getSigningKeyAlias() {
+    return null;
+  }
+
+  @Override
+  public List<String> getGlobalRulesServices() {
+    return Collections.EMPTY_LIST;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#isWebsocketEnabled()
+   */
+  @Override
+  public boolean isWebsocketEnabled() {
+    return DEFAULT_WEBSOCKET_FEATURE_ENABLED;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketMaxTextMessageSize()
+   */
+  @Override
+  public int getWebsocketMaxTextMessageSize() {
+    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketMaxBinaryMessageSize()
+   */
+  @Override
+  public int getWebsocketMaxBinaryMessageSize() {
+    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_SIZE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketMaxTextMessageBufferSize()
+   */
+  @Override
+  public int getWebsocketMaxTextMessageBufferSize() {
+    return DEFAULT_WEBSOCKET_MAX_TEXT_MESSAGE_BUFFER_SIZE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketMaxBinaryMessageBufferSize()
+   */
+  @Override
+  public int getWebsocketMaxBinaryMessageBufferSize() {
+    return DEFAULT_WEBSOCKET_MAX_BINARY_MESSAGE_BUFFER_SIZE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketInputBufferSize()
+   */
+  @Override
+  public int getWebsocketInputBufferSize() {
+    return DEFAULT_WEBSOCKET_INPUT_BUFFER_SIZE;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketAsyncWriteTimeout()
+   */
+  @Override
+  public int getWebsocketAsyncWriteTimeout() {
+    return DEFAULT_WEBSOCKET_ASYNC_WRITE_TIMEOUT;
+  }
+
+  /* (non-Javadoc)
+   * @see org.apache.hadoop.gateway.config.GatewayConfig#getWebsocketIdleTimeout()
+   */
+  @Override
+  public int getWebsocketIdleTimeout() {
+    return DEFAULT_WEBSOCKET_IDLE_TIMEOUT;
+  }
+
+  @Override
+  public boolean isMetricsEnabled() {
+    return false;
+  }
+
+  @Override
+  public boolean isJmxMetricsReportingEnabled() {
+    return false;
+  }
+
+  @Override
+  public boolean isGraphiteMetricsReportingEnabled() {
+    return false;
+  }
+
+  @Override
+  public String getGraphiteHost() {
+    return null;
+  }
+
+  @Override
+  public int getGraphitePort() {
+    return 0;
+  }
+
+  @Override
+  public int getGraphiteReportingFrequency() {
+    return 0;
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
new file mode 100644
index 0000000..b8254fc
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/GatewayTestDriver.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLTag;
+import org.apache.commons.io.FileUtils;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.config.GatewayConfig;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.hamcrest.MatcherAssert;
+import org.hamcrest.Matchers;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.net.InetAddress;
+import java.net.URL;
+import java.net.UnknownHostException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.junit.Assert.assertThat;
+
+public class GatewayTestDriver {
+
+  private static Logger log = LoggerFactory.getLogger(GatewayTestDriver.class);
+
+  public Class<?> resourceBaseClass;
+  public SimpleLdapDirectoryServer ldap;
+  public TcpTransport ldapTransport;
+  public boolean useGateway;
+  public GatewayServer gateway;
+  public GatewayConfig config;
+  public String clusterName;
+
+  /**
+   * Sets the class from which relative test resource names should be resolved.
+   * @param resourceBaseClass The class from which relative test resource names should be resolved.
+   */
+  public void setResourceBase( Class<?> resourceBaseClass ) {
+    this.resourceBaseClass = resourceBaseClass;
+  }
+
+  /**
+   * Starts an embedded LDAP server of the specified port.
+   * @param port The desired port the LDAP server should listen on.
+   * @return The actual port the LDAP server is listening on.
+   * @throws Exception Thrown if a failure occurs.
+   */
+  public int setupLdap( int port ) throws Exception {
+    URL usersUrl = getResourceUrl("users.ldif");
+    ldapTransport = new TcpTransport( 0 );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    log.info( "LDAP port = " + ldapTransport.getAcceptor().getLocalAddress().getPort() );
+    return port;
+  }
+
+
+  /**
+   * Creates a GATEWAY_HOME, starts a gateway instance and deploys a test topology.
+   */
+  public void setupGateway( GatewayTestConfig config, String cluster, XMLTag topology, boolean use ) throws Exception {
+    this.useGateway = use;
+    this.config = config;
+    this.clusterName = cluster;
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
+    gatewayDir.mkdirs();
+
+    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( config.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( config.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    File descriptor = new File( topoDir, cluster + ".xml" );
+    FileOutputStream stream = new FileOutputStream( descriptor );
+    topology.toStream( stream );
+    stream.close();
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put("persist-master", "false");
+    options.put("master", "password");
+    try {
+      srvcs.init(config, options);
+    } catch (ServiceLifecycleException e) {
+      e.printStackTrace(); // I18N not required.
+    }
+    File stacksDir = new File( config.getGatewayServicesDir() );
+    stacksDir.mkdirs();
+    //TODO: [sumit] This is a hack for now, need to find a better way to locate the source resources for 'stacks' to be tested
+    String pathToStacksSource = "gateway-service-definitions/src/main/resources/services";
+    File stacksSourceDir = new File( targetDir.getParent(), pathToStacksSource);
+    if (!stacksSourceDir.exists()) {
+      stacksSourceDir = new File( targetDir.getParentFile().getParentFile().getParent(), pathToStacksSource);
+    }
+    if (stacksSourceDir.exists()) {
+      FileUtils.copyDirectoryToDirectory(stacksSourceDir, stacksDir);
+    }
+
+    gateway = GatewayServer.startGateway(config, srvcs);
+    MatcherAssert.assertThat("Failed to start gateway.", gateway, notNullValue());
+
+    log.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
+  }
+
+  public void cleanup() throws Exception {
+    gateway.stop();
+    FileUtils.deleteQuietly( new File( config.getGatewayTopologyDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewaySecurityDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
+    FileUtils.deleteQuietly( new File( config.getGatewayServicesDir() ) );
+    ldap.stop( true );
+  }
+
+
+  public String getResourceBaseName() {
+    return resourceBaseClass.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  public String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, Matchers.notNullValue() );
+    return url;
+  }
+
+  public String getLdapUrl() {
+    return "ldap://localhost:" + ldapTransport.getAcceptor().getLocalAddress().getPort();
+  }
+
+  public String getClusterUrl() {
+    String url;
+    String localHostName = getLocalHostName();
+    url = "http://" + localHostName + ":" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath() + "/" + clusterName;
+    return url;
+  }
+
+  public int getGatewayPort() {
+    return gateway.getAddresses()[0].getPort();
+  }
+
+  private String getLocalHostName() {
+    String hostName = "localhost";
+    try {
+      hostName = InetAddress.getByName("127.0.0.1").getHostName();
+    } catch( UnknownHostException e ) {
+      // Ignore and use the default.
+    }
+    return hostName;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/ShellTest.java
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/ShellTest.java b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/ShellTest.java
new file mode 100644
index 0000000..94db645
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/java/org/apache/hadoop/gateway/ShellTest.java
@@ -0,0 +1,198 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+import org.apache.hadoop.gateway.shell.Shell;
+import org.apache.hadoop.test.category.ReleaseTest;
+import org.junit.experimental.categories.Category;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.security.Principal;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import groovy.lang.Binding;
+import groovy.lang.GroovyShell;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.TestUtils;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.BasicUserPrincipal;
+import org.apache.http.auth.Credentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.log4j.PropertyConfigurator;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.containsString;
+import static org.junit.Assert.assertNotNull;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+
+
+/**
+ * Release tests that drive the Knox groovy shell DSL (status/put/get)
+ * against WebHDFS, backed by an in-process two-datanode MiniDFSCluster.
+ * One gateway instance and one topology are shared by every test method
+ * in this suite.
+ */
+@Category(ReleaseTest.class)
+public class ShellTest {
+
+  private static MiniDFSCluster miniDFSCluster;
+  private static HdfsConfiguration configuration;
+  // HTTP port of the NameNode; the gateway topology proxies WEBHDFS to it.
+  private static int nameNodeHttpPort;
+  private static String userName;
+
+  private static GatewayTestDriver driver = new GatewayTestDriver();
+  private static File baseDir;
+
+  /**
+   * Boots the MiniDFSCluster and the Knox gateway once for the whole suite.
+   * @throws Exception on any cluster or gateway setup failure
+   */
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    nameNodeHttpPort = TestUtils.findFreePort();
+    configuration = new HdfsConfiguration();
+    baseDir = new File(KeyStoreTestUtil.getClasspathDir(ShellTest.class));
+    System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, baseDir.getAbsolutePath());
+    miniDFSCluster = new MiniDFSCluster.Builder(configuration)
+        // Pick the RPC port dynamically too: the previously hard-coded
+        // 20112 could collide with another process on a shared build host.
+        .nameNodePort(TestUtils.findFreePort())
+        .nameNodeHttpPort(nameNodeHttpPort)
+        .numDataNodes(2)
+        .format(true)
+        .racks(null)
+        .build();
+    // createUserForTesting registers "guest" (group "users") with UGI as a
+    // side effect; the returned name is kept but not otherwise read here.
+    userName = UserGroupInformation.createUserForTesting("guest", new String[] {"users"}).getUserName();
+
+    setupKnox();
+  }
+
+  /**
+   * Configures and starts an embedded gateway (plus the test LDAP server)
+   * fronting the MiniDFSCluster, using the topology from createTopology().
+   */
+  private static void setupKnox() throws Exception {
+    //knox setup
+    System.setProperty("gateway.hadoop.kerberos.secured", "false");
+    GatewayTestConfig config = new GatewayTestConfig();
+    config.setGatewayPath( "gateway" );
+    config.setHadoopKerberosSecured(false);
+    driver.setResourceBase(ShellTest.class);
+    // Port 0 lets the embedded LDAP server pick a free port itself.
+    driver.setupLdap(0);
+    driver.setupGateway(config, "cluster", createTopology(), true);
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    miniDFSCluster.shutdown();
+    driver.cleanup();
+  }
+
+  @Test
+  public void basicInsecureShell() throws Exception {
+    testPutGetScript("InsecureWebHdfsPutGet.groovy");
+  }
+
+  /**
+   * Evaluates the named groovy script against the running gateway.  The
+   * script receives the gateway URL and a local file path through the
+   * Binding and is expected to publish 'status' and 'fetchedFile' back
+   * into the Binding for assertion here.
+   * @param script classpath-relative name of the groovy script to run
+   */
+  private void testPutGetScript(String script) throws IOException, URISyntaxException {
+    setupLogging();
+    DistributedFileSystem fileSystem = miniDFSCluster.getFileSystem();
+    // Recreate /user/guest/example owned by guest so the script can write.
+    Path dir = new Path("/user/guest/example");
+    fileSystem.delete(dir, true);
+    fileSystem.mkdirs(dir, new FsPermission("777"));
+    fileSystem.setOwner(dir, "guest", "users");
+    Binding binding = new Binding();
+    binding.setProperty("gateway", driver.getClusterUrl());
+    URL readme = driver.getResourceUrl("README");
+    File file = new File(readme.toURI());
+    System.out.println(file.exists());
+    binding.setProperty("file", file.getAbsolutePath());
+    GroovyShell shell = new GroovyShell(binding);
+    shell.evaluate(driver.getResourceUrl(script).toURI());
+    String status = (String) binding.getProperty("status");
+    assertNotNull(status);
+    System.out.println(status);
+    String fetchedFile = (String) binding.getProperty("fetchedFile");
+    assertNotNull(fetchedFile);
+    System.out.println(fetchedFile);
+    // The script round-trips the README file; its content must survive.
+    assertThat(fetchedFile, containsString("README"));
+  }
+
+  @Test
+  public void basicSecureShell() throws Exception {
+    testPutGetScript("WebHdfsPutGet.groovy");
+  }
+
+  private static void setupLogging() {
+    PropertyConfigurator.configure(ClassLoader.getSystemResource("log4j.properties"));
+  }
+
+  /**
+   * Creates a topology that is deployed to the gateway instance for the test suite.
+   * Note that this topology is shared by all of the test methods in this suite.
+   * Providers: WebAppSec (CSRF off), Shiro/LDAP authentication against the
+   * embedded LDAP server, default identity assertion, and AclsAuthz; the
+   * single WEBHDFS service points at the MiniDFSCluster NameNode HTTP port.
+   * @return A populated XML structure for a topology file.
+   */
+  private static XMLTag createTopology() {
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag( "provider" )
+        .addTag("role").addText("webappsec")
+        .addTag("name").addText("WebAppSec")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("csrf.enabled")
+        .addTag("value").addText("false").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        .addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        .addTag( "value" ).addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag( "value" ).addText(driver.getLdapUrl()).gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag( "value" ).addText("simple").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("urls./**")
+        .addTag( "value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag("role").addText("identity-assertion")
+        .addTag("enabled").addText("true")
+        .addTag("name").addText("Default").gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText( "authorization" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag("name").addText("AclsAuthz").gotoParent()
+        .addTag("param")
+        .addTag("name").addText( "webhdfs-acl" )
+        .addTag("value").addText("hdfs;*;*").gotoParent()
+        .gotoRoot()
+        .addTag("service")
+        .addTag("role").addText("WEBHDFS")
+        .addTag("url").addText("http://localhost:" + nameNodeHttpPort + "/webhdfs/").gotoParent()
+        .gotoRoot();
+//     System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/InsecureWebHdfsPutGet.groovy
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/InsecureWebHdfsPutGet.groovy b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/InsecureWebHdfsPutGet.groovy
new file mode 100644
index 0000000..fb0a6c4
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/InsecureWebHdfsPutGet.groovy
@@ -0,0 +1,39 @@
+package org.apache.hadoop.gateway.ShellTest
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+
+// 'gateway' and 'file' are supplied through the groovy Binding by the
+// driving test; uncomment the lines below to run this script by hand.
+//gateway = "https://localhost:8443/gateway/sandbox"
+//file ="README"
+remoteName = "README"
+
+user = "guest"
+credential = "guest-password"
+
+targetDir = "/user/" + user + "/example"
+targetPath = targetDir + "/" + remoteName
+
+// Insecure login variant: avoids client-side SSL trust setup against the
+// self-signed test gateway (see the Knox shell DSL docs for details).
+session = Hadoop.loginInsecure( gateway, user, credential )
+
+// Listing "/" verifies the WEBHDFS service answers through the gateway;
+// the calling test asserts on the 'status' binding variable.
+status = Hdfs.status( session ).file( "/" ).now().string
+
+// Upload once, then again with overwrite to exercise both code paths.
+Hdfs.put( session ).file( file ).to( targetPath ).now()
+Hdfs.put( session ).file( file ).to( targetPath ).overwrite( true ).permission( 777 ).now()
+
+// Read the file back; 'fetchedFile' is asserted on by the calling test.
+fetchedFile = Hdfs.get( session ).from( targetPath ).now().string
+session.shutdown()

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/README
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/README b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/README
new file mode 100644
index 0000000..0ef01d4
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/README
@@ -0,0 +1,57 @@
+------------------------------------------------------------------------------
+README file for the Apache Knox Gateway
+------------------------------------------------------------------------------
+This distribution includes cryptographic software.  The country in 
+which you currently reside may have restrictions on the import, 
+possession, use, and/or re-export to another country, of 
+encryption software.  BEFORE using any encryption software, please 
+check your country's laws, regulations and policies concerning the
+import, possession, or use, and re-export of encryption software, to 
+see if this is permitted.  See <http://www.wassenaar.org/> for more
+information.
+
+The U.S. Government Department of Commerce, Bureau of Industry and
+Security (BIS), has classified this software as Export Commodity 
+Control Number (ECCN) 5D002.C.1, which includes information security
+software using or performing cryptographic functions with asymmetric
+algorithms.  The form and manner of this Apache Software Foundation
+distribution makes it eligible for export under the License Exception
+ENC Technology Software Unrestricted (TSU) exception (see the BIS 
+Export Administration Regulations, Section 740.13) for both object 
+code and source code.
+
+The following provides more details on the included cryptographic
+software:
+  This package includes the use of ApacheDS which is dependent upon the 
+Bouncy Castle Crypto APIs written by the Legion of the Bouncy Castle
+http://www.bouncycastle.org/ feedback-crypto@bouncycastle.org.
+
+------------------------------------------------------------------------------
+Description
+------------------------------------------------------------------------------
+Please see the Apache Knox site for a detailed description.
+
+http://knox.apache.org/
+
+------------------------------------------------------------------------------
+Changes
+------------------------------------------------------------------------------
+Please see the CHANGES file.
+
+------------------------------------------------------------------------------
+Known Issues
+------------------------------------------------------------------------------
+Please see the ISSUES file.
+
+------------------------------------------------------------------------------
+Installation & Usage
+------------------------------------------------------------------------------
+Please see the Apache Knox Gateway User's Guide - available on the Knox site.
+http://knox.apache.org/
+
+------------------------------------------------------------------------------
+Troubleshooting & Filing bugs
+------------------------------------------------------------------------------
+Please see the Apache Knox Gateway User's Guide for detailed information.
+http://knox.apache.org
+

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/WebHdfsPutGet.groovy
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/WebHdfsPutGet.groovy b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/WebHdfsPutGet.groovy
new file mode 100644
index 0000000..bf1c9d1
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/WebHdfsPutGet.groovy
@@ -0,0 +1,40 @@
+package org.apache.hadoop.gateway.ShellTest
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+
+// 'gateway' and 'file' are supplied through the groovy Binding by the
+// driving test; uncomment the lines below to run this script by hand.
+//gateway = "https://localhost:8443/gateway/sandbox"
+//file ="README"
+remoteName = "README"
+
+user = "guest"
+credential = "guest-password"
+
+targetDir = "/user/" + user + "/example"
+targetPath = targetDir + "/" + remoteName
+
+// Standard login: relies on the configured SSL trust for the gateway.
+session = Hadoop.login( gateway, user, credential )
+
+// Listing "/" verifies the WEBHDFS service answers through the gateway;
+// the calling test asserts on the 'status' binding variable.
+status = Hdfs.status( session ).file( "/" ).now().string
+
+// Upload once, then again with overwrite to exercise both code paths.
+Hdfs.put( session ).file( file ).to( targetPath ).now()
+Hdfs.put( session ).file( file ).to( targetPath ).overwrite( true ).permission( 777 ).now()
+
+// Read the file back; 'fetchedFile' is asserted on by the calling test.
+fetchedFile = Hdfs.get( session ).from( targetPath ).now().string
+
+session.shutdown()

http://git-wip-us.apache.org/repos/asf/knox/blob/082455c3/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
----------------------------------------------------------------------
diff --git a/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
new file mode 100644
index 0000000..d82e99a
--- /dev/null
+++ b/gateway-test-release/webhdfs-test/src/test/resources/org/apache/hadoop/gateway/ShellTest/users.ldif
@@ -0,0 +1,61 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+version: 1
+
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+
+dn: uid=hdfs,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: LarryWalls
+sn: Walls
+uid: hdfs
+userPassword:hdfs-password
+
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: Guest
+uid: guest
+userPassword:guest-password
+
+dn: cn=admin,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:groupOfNames
+cn: admin
+member: uid=allowedUser,ou=people,dc=hadoop,dc=apache,dc=org