Posted to commits@knox.apache.org by mo...@apache.org on 2017/09/13 13:57:58 UTC

[01/11] knox git commit: KNOX-1035 - Move JWTTokenTest to gateway-spi module + add more tests

Repository: knox
Updated Branches:
  refs/heads/KNOX-998-Package_Restructuring 50f46e9ee -> f4a4355d4


KNOX-1035 - Move JWTTokenTest to gateway-spi module + add more tests


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/773ac9d8
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/773ac9d8
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/773ac9d8

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 773ac9d8e2594241d5d95448b19a75982f467efd
Parents: e88c7a9
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Mon Sep 11 10:29:41 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 11 10:29:41 2017 +0100

----------------------------------------------------------------------
 .../provider/federation/JWTTokenTest.java       | 132 ------------
 .../security/impl/CMFKeystoreServiceTest.java   |  11 +-
 .../security/token/impl/JWTTokenTest.java       | 213 +++++++++++++++++++
 3 files changed, 218 insertions(+), 138 deletions(-)
----------------------------------------------------------------------
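For readers skimming the diff: the test class moves from the gateway-provider-security-jwt federation package into gateway-spi, and the new copy adds RSA signature coverage (RS256 and RS512) on top of the existing token-creation tests. Below is a minimal, self-contained sketch of the sign/verify pattern those new tests exercise, assuming the gateway-spi JWTToken API shown in the diff below and the Nimbus JOSE+JWT library on the classpath; the class name JWTTokenSignVerifySketch is illustrative and not part of the commit.

import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;

import com.nimbusds.jose.JWSSigner;
import com.nimbusds.jose.JWSVerifier;
import com.nimbusds.jose.crypto.RSASSASigner;
import com.nimbusds.jose.crypto.RSASSAVerifier;

import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;

public class JWTTokenSignVerifySketch {
  public static void main(String[] args) throws Exception {
    // Throwaway RSA key pair, generated the same way as in the test constructor.
    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
    kpg.initialize(2048);
    KeyPair kp = kpg.genKeyPair();
    RSAPublicKey publicKey = (RSAPublicKey) kp.getPublic();
    RSAPrivateKey privateKey = (RSAPrivateKey) kp.getPrivate();

    // Claims array layout used by the tests: issuer, subject, audience, expiry (epoch seconds).
    String[] claims = new String[] {
        "KNOXSSO",
        "john.doe@example.com",
        "https://login.example.com",
        Long.toString((System.currentTimeMillis() / 1000) + 300)
    };
    JWTToken token = new JWTToken("RS256", claims);

    // Sign with the private key, then verify with the matching public key.
    JWSSigner signer = new RSASSASigner(privateKey);
    token.sign(signer);

    JWSVerifier verifier = new RSASSAVerifier(publicKey);
    System.out.println("signature bytes: " + token.getSignaturePayload().length);
    System.out.println("verified: " + token.verify(verifier));
  }
}

The RS512 variants in the new test file differ only in passing JWSAlgorithm.RS512.getName() instead of the "RS256" literal.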


http://git-wip-us.apache.org/repos/asf/knox/blob/773ac9d8/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/JWTTokenTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/JWTTokenTest.java
deleted file mode 100644
index 2830a9c..0000000
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/JWTTokenTest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.provider.federation;
-
-import java.util.ArrayList;
-
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
-
-public class JWTTokenTest extends org.junit.Assert {
-  private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
-  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
-  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
-  
-//  public void testTokenParsing() throws Exception {
-//    try {
-//      JWTToken token = JWTToken.parseToken(JWT_TOKEN);
-//      assertEquals(token.getHeader(), HEADER);
-//      assertEquals(token.getClaims(), CLAIMS);
-//      
-//      assertEquals(token.getIssuer(), "gateway");
-//      assertEquals(token.getPrincipal(), "john.doe@example.com");
-//      assertEquals(token.getAudience(), "https://login.example.com");
-//      assertEquals(token.getExpires(), "1363360913");
-//    }
-//    catch (ParseException pe) {
-//      fail("ParseException encountered.");
-//    }
-//  }
-  
-  @Test
-  public void testTokenCreation() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "https://login.example.com";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListSingle() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(1, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListMultiple() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-    audiences.add("KNOXSSO");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(2, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListCombined() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "LJM";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-    audiences.add("KNOXSSO");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(3, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithNullAudienceList() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = null;
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals(null, token.getAudience());
-    assertEquals(null, token.getAudienceClaims());
-  }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/773ac9d8/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/impl/CMFKeystoreServiceTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/impl/CMFKeystoreServiceTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/impl/CMFKeystoreServiceTest.java
index 57064c6..2287b97 100644
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/impl/CMFKeystoreServiceTest.java
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/impl/CMFKeystoreServiceTest.java
@@ -33,7 +33,6 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 
 /**
 *
@@ -57,31 +56,31 @@ public class CMFKeystoreServiceTest {
         public void init(GatewayConfig config, Map<String, String> options)
             throws ServiceLifecycleException {
           // TODO Auto-generated method stub
-          
+
         }
 
         public void start() throws ServiceLifecycleException {
           // TODO Auto-generated method stub
-          
+
         }
 
         public void stop() throws ServiceLifecycleException {
           // TODO Auto-generated method stub
-          
+
         }
 
         public char[] getMasterSecret() {
           // TODO Auto-generated method stub
           return "testmaster".toCharArray();
         }
-        
+
       });
     } catch (ServiceLifecycleException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
   }
-  
+
   @Test
   public void testCreationOfStoreForCredential() throws KeystoreServiceException {
     try {

http://git-wip-us.apache.org/repos/asf/knox/blob/773ac9d8/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
new file mode 100644
index 0000000..ef4023d
--- /dev/null
+++ b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.services.security.token.impl;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+
+import org.junit.Test;
+
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
+public class JWTTokenTest extends org.junit.Assert {
+  private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
+  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
+  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
+
+  private RSAPublicKey publicKey;
+  private RSAPrivateKey privateKey;
+
+  public JWTTokenTest() throws Exception, NoSuchAlgorithmException {
+    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+    kpg.initialize(2048);
+
+    KeyPair kp = kpg.genKeyPair();
+    publicKey = (RSAPublicKey) kp.getPublic();
+    privateKey = (RSAPrivateKey) kp.getPrivate();
+  }
+
+  public void testTokenParsing() throws Exception {
+    JWTToken token = JWTToken.parseToken(JWT_TOKEN);
+    assertEquals(token.getHeader(), HEADER);
+    assertEquals(token.getClaims(), CLAIMS);
+
+    assertEquals(token.getIssuer(), "gateway");
+    assertEquals(token.getPrincipal(), "john.doe@example.com");
+    assertEquals(token.getAudience(), "https://login.example.com");
+    assertEquals(token.getExpires(), "1363360913");
+  }
+
+  @Test
+  public void testTokenCreation() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListSingle() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(1, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListMultiple() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+    audiences.add("KNOXSSO");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(2, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListCombined() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "LJM";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+    audiences.add("KNOXSSO");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(3, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithNullAudienceList() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = null;
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals(null, token.getAudience());
+    assertArrayEquals(null, token.getAudienceClaims());
+  }
+
+  @Test
+  public void testTokenCreationRS512() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
+  }
+
+  @Test
+  public void testTokenSignature() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+
+    // Sign the token
+    JWSSigner signer = new RSASSASigner(privateKey);
+    token.sign(signer);
+    assertTrue(token.getSignaturePayload().length > 0);
+
+    // Verify the signature
+    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
+    assertTrue(token.verify(verifier));
+  }
+
+  @Test
+  public void testTokenSignatureRS512() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
+
+    // Sign the token
+    JWSSigner signer = new RSASSASigner(privateKey);
+    token.sign(signer);
+    assertTrue(token.getSignaturePayload().length > 0);
+
+    // Verify the signature
+    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
+    assertTrue(token.verify(verifier));
+  }
+
+}


[09/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
KNOX-998 - Merging from master


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/582cc7e3
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/582cc7e3
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/582cc7e3

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 582cc7e38a99f4dc624325cccd66e12a0923ec6f
Parents: 50f46e9 e2e1251
Author: Sandeep More <mo...@apache.org>
Authored: Wed Sep 13 09:36:13 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Sep 13 09:36:13 2017 -0400

----------------------------------------------------------------------
 .../provider/federation/JWTTokenTest.java       | 132 ------------
 gateway-server/pom.xml                          |  22 --
 .../resources/services/hbase/0.98.0/service.xml |   2 +-
 .../definition/ServiceDefinitionTest.java       |   4 +-
 .../security/token/impl/JWTTokenTest.java       | 213 +++++++++++++++++++
 .../security/impl/CMFKeystoreServiceTest.java   |  11 +-
 gateway-test/pom.xml                            |  12 +-
 .../gateway/AmbariServiceDefinitionTest.java    |   6 +-
 .../knox/gateway/GatewayAdminFuncTest.java      |   2 +-
 .../gateway/GatewayAdminTopologyFuncTest.java   |  43 ++--
 .../apache/knox/gateway/GatewayAppFuncTest.java |   6 +-
 .../knox/gateway/GatewayBasicFuncTest.java      |  94 ++++----
 .../knox/gateway/GatewayDeployFuncTest.java     |   4 +-
 .../knox/gateway/GatewayHealthFuncTest.java     |   4 +-
 .../GatewayLdapDynamicGroupFuncTest.java        |   2 +-
 .../knox/gateway/GatewayLdapGroupFuncTest.java  |   4 +-
 .../gateway/GatewayLdapPosixGroupFuncTest.java  |   2 +-
 .../gateway/GatewayLocalServiceFuncTest.java    |   2 +-
 .../knox/gateway/GatewayMultiFuncTest.java      |  16 +-
 .../GatewayPortMappingDisableFeatureTest.java   |   2 +-
 .../gateway/GatewayPortMappingFailTest.java     |   2 +-
 .../gateway/GatewayPortMappingFuncTest.java     |   2 +-
 .../knox/gateway/GatewaySampleFuncTest.java     |   2 +-
 .../apache/knox/gateway/GatewaySslFuncTest.java |   4 +-
 .../apache/knox/gateway/Knox242FuncTest.java    |   2 +-
 .../apache/knox/gateway/WebHdfsHaFuncTest.java  |   2 +-
 pom.xml                                         |  14 +-
 27 files changed, 332 insertions(+), 279 deletions(-)
----------------------------------------------------------------------
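One mechanical change that recurs across the functional-test hunks below is the REST Assured package migration: the static import moves from com.jayway.restassured.RestAssured.given to io.restassured.RestAssured.given (and ContentType likewise), while the fluent given/expect/when call chains themselves are untouched. Here is a minimal sketch of that chain as the migrated tests use it, assuming REST Assured (io.restassured) and Apache HttpCore on the classpath; the gateway URL and class name below are illustrative placeholders, not taken from the commit.

import static io.restassured.RestAssured.given;

import org.apache.http.HttpStatus;

public class RestAssuredPatternSketch {
  public static void main(String[] args) {
    // Placeholder endpoint; the tests build this from the running gateway's port and path.
    String serviceUrl = "http://localhost:8443/gateway/test-cluster/api/v1/version";

    // Same legacy given/expect/when style kept by the migrated tests.
    String body = given()
        .auth().preemptive().basic("guest", "guest-password")
        .expect()
        .statusCode(HttpStatus.SC_OK)
        .when().get(serviceUrl).asString();

    System.out.println(body);
  }
}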


http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java
index 7359a8d,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-service-definitions/src/test/java/org/apache/knox/gateway/service/definition/ServiceDefinitionTest.java
----------------------------------------------------------------------
diff --cc gateway-service-definitions/src/test/java/org/apache/knox/gateway/service/definition/ServiceDefinitionTest.java
index 60f2d70,0000000..75b18f7
mode 100644,000000..100644
--- a/gateway-service-definitions/src/test/java/org/apache/knox/gateway/service/definition/ServiceDefinitionTest.java
+++ b/gateway-service-definitions/src/test/java/org/apache/knox/gateway/service/definition/ServiceDefinitionTest.java
@@@ -1,90 -1,0 +1,90 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.service.definition;
 +
 +import org.junit.Test;
 +
 +import javax.xml.bind.JAXBContext;
 +import javax.xml.bind.Unmarshaller;
 +import java.net.URL;
 +import java.util.List;
 +
 +import static org.junit.Assert.assertEquals;
 +import static org.junit.Assert.assertNotNull;
 +
 +public class ServiceDefinitionTest {
 +
 +  @Test
 +  public void testUnmarshalling() throws Exception {
 +    JAXBContext context = JAXBContext.newInstance(ServiceDefinition.class);
 +    Unmarshaller unmarshaller = context.createUnmarshaller();
 +    URL url = ClassLoader.getSystemResource("services/foo/1.0.0/service.xml");
 +    ServiceDefinition definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
 +    assertEquals("foo", definition.getName());
 +    assertEquals("FOO", definition.getRole());
 +    assertEquals("1.0.0", definition.getVersion());
 +    assertEquals("custom-client", definition.getDispatch().getContributorName());
 +    assertEquals("ha-client", definition.getDispatch().getHaContributorName());
 +    assertEquals("org.apache.knox.gateway.MockHttpClientFactory", definition.getDispatch().getHttpClientFactory());
 +    List<Policy> policies = definition.getPolicies();
 +    assertEquals(5, policies.size());
 +    String[] policyOrder = new String[]{"webappsec", "authentication", "rewrite", "identity-assertion", "authorization"};
 +    for (int i=0; i< policyOrder.length; i++ ) {
 +      assertEquals(policyOrder[i], policies.get(i).getRole());
 +    }
 +    List<Route> routes = definition.getRoutes();
 +    assertNotNull(routes);
 +    assertEquals(1, routes.size());
 +    Route route = routes.get(0);
 +    assertEquals("/foo/?**", route.getPath());
 +    assertEquals("http-client", route.getDispatch().getContributorName());
 +    policies = route.getPolicies();
 +    assertEquals(5, policies.size());
 +    policyOrder = new String[]{"webappsec", "federation", "identity-assertion", "authorization", "rewrite"};
 +    for (int i=0; i< policyOrder.length; i++ ) {
 +      assertEquals(policyOrder[i], policies.get(i).getRole());
 +    }
 +    assertNotNull(definition.getTestURLs());
 +    assertEquals(2, definition.getTestURLs().size());
 +  }
 +
 +  @Test
 +  public void testUnmarshallingCommonServices() throws Exception {
 +    JAXBContext context = JAXBContext.newInstance(ServiceDefinition.class);
 +    Unmarshaller unmarshaller = context.createUnmarshaller();
 +    URL url = ClassLoader.getSystemResource("services/yarn-rm/2.5.0/service.xml");
 +    ServiceDefinition definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
 +    assertEquals("resourcemanager", definition.getName());
 +    assertEquals("RESOURCEMANAGER", definition.getRole());
 +    assertEquals("2.5.0", definition.getVersion());
 +    List<Route> routes = definition.getRoutes();
 +    assertNotNull(routes);
 +    assertEquals(12, routes.size());
 +    assertNotNull(routes.get(0).getPath());
 +    url = ClassLoader.getSystemResource("services/hbase/0.98.0/service.xml");
 +    definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
-     assertNotNull(definition.getDispatch());
-     assertEquals("org.apache.knox.gateway.hbase.HBaseDispatch", definition.getDispatch().getClassName());
++    assertNotNull(definition.getName());
++    assertEquals("webhbase", definition.getName());
 +    url = ClassLoader.getSystemResource("services/webhdfs/2.4.0/service.xml");
 +    definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
 +    assertNotNull(definition.getDispatch());
 +    assertEquals("org.apache.knox.gateway.hdfs.dispatch.HdfsHttpClientDispatch", definition.getDispatch().getClassName());
 +    assertEquals("org.apache.knox.gateway.hdfs.dispatch.WebHdfsHaDispatch", definition.getDispatch().getHaClassName());
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
----------------------------------------------------------------------
diff --cc gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
index 73306f4,0000000..7386f74
mode 100644,000000..100644
--- a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/impl/CMFKeystoreServiceTest.java
@@@ -1,162 -1,0 +1,161 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway.services.security.impl;
 +
 +import java.io.File;
 +import java.security.KeyStore;
 +import java.security.KeyStoreException;
 +import java.util.Map;
 +
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.KeystoreServiceException;
 +import org.apache.knox.gateway.services.security.MasterService;
 +import org.apache.hadoop.test.category.FastTests;
 +import org.apache.hadoop.test.category.UnitTests;
 +import org.junit.Before;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +
 +import static org.junit.Assert.assertTrue;
- import static org.junit.Assert.fail;
 +
 +/**
 +*
 +*/
 +@Category( { UnitTests.class, FastTests.class } )
 +public class CMFKeystoreServiceTest {
 +  CMFKeystoreService ks;
 +  String aliasName = "TestAliasName";
 +  String secretValue = "AliasSecretValue";
 +  char[] password = { 'P', 'A', 'S', 'S' };
 +  File credentialsStoreFile = new File("ambari-credentials.jceks");
 +  File keyStoreFile = new File("ambari.jks");
 +  File certificateFile = new File("ambari");
 +
 +  @Before
 +  public void setup() {
 +    try {
 +      ks = new CMFKeystoreService(".", "ambari");
 +      ks.setMasterService(new MasterService() {
 +
 +        public void init(GatewayConfig config, Map<String, String> options)
 +            throws ServiceLifecycleException {
 +          // TODO Auto-generated method stub
-           
++
 +        }
 +
 +        public void start() throws ServiceLifecycleException {
 +          // TODO Auto-generated method stub
-           
++
 +        }
 +
 +        public void stop() throws ServiceLifecycleException {
 +          // TODO Auto-generated method stub
-           
++
 +        }
 +
 +        public char[] getMasterSecret() {
 +          // TODO Auto-generated method stub
 +          return "testmaster".toCharArray();
 +        }
-         
++
 +      });
 +    } catch (ServiceLifecycleException e) {
 +      // TODO Auto-generated catch block
 +      e.printStackTrace();
 +    }
 +  }
-   
++
 +  @Test
 +  public void testCreationOfStoreForCredential() throws KeystoreServiceException {
 +    try {
 +      ks.createCredentialStore();
 +      assertTrue("Credential Store file is not created", ks.isCredentialStoreAvailable()
 +          && credentialsStoreFile.exists());
 +      KeyStore credentialStore = ks.getCredentialStore();
 +      assertTrue("Credential Store file is not created with proper file type",
 +        ("JCEKS").equalsIgnoreCase(credentialStore.getType()));
 +    } finally {
 +      credentialsStoreFile.deleteOnExit();
 +    }
 +  }
 +
 +  @Test
 +  public void testCreationOfKeyStore() throws KeystoreServiceException {
 +    try {
 +      ks.createKeystore();
 +      assertTrue("Key Store file is not created", ks.isKeystoreAvailable() && keyStoreFile.exists());
 +      KeyStore keystore = ks.getKeystore();
 +      assertTrue("Key Store file is not created with proper file type",
 +        ("JKS").equalsIgnoreCase(keystore.getType()));
 +      ks.createCredentialStore();
 +      ks.addCredential(aliasName, "secretValue");
 +    } finally {
 +      keyStoreFile.deleteOnExit();
 +      credentialsStoreFile.deleteOnExit();
 +    }
 +  }
 +
 +  @Test
 +  public void testAdditionOfCredentialsToKeyStore() throws KeystoreServiceException {
 +    try {
 +      ks.createKeystore();
 +      ks.createCredentialStore();
 +      ks.addCredential(aliasName, "secretValue");
 +      char[] secret = ks.getCredential(aliasName);
 +      assertTrue("Addition of Credentials failed", new String(secret).equals("secretValue"));
 +    } finally {
 +      credentialsStoreFile.deleteOnExit();
 +      keyStoreFile.deleteOnExit();
 +    }
 +  }
 +
 +  @Test
 +  public void testAdditionOfAliasWithSelfSignedCertificate() throws KeystoreServiceException,
 +      KeyStoreException {
 +    try {
 +      ks.createKeystore();
 +      ks.createCredentialStore();
 +      ks.addCredential(aliasName, "secretValue");
 +      ks.addSelfSignedCert(aliasName, password);
 +      KeyStore keystore = ks.getKeystore();
 +      assertTrue("Addition of Alias with Self Signed Certificate failed",
 +        !keystore.getCertificate(aliasName).toString().isEmpty() && certificateFile.exists());
 +    } finally {
 +      credentialsStoreFile.deleteOnExit();
 +      keyStoreFile.deleteOnExit();
 +      certificateFile.deleteOnExit();
 +    }
 +  }
 +
 +  @Test
 +  public void testFetchOfAliasKey() throws KeystoreServiceException {
 +    try {
 +      ks.createKeystore();
 +      ks.createCredentialStore();
 +      ks.addCredential(aliasName, "secretValue");
 +      ks.addSelfSignedCert(aliasName, password);
 +      assertTrue("Fetch of AliasKey failed", !ks.getKey(aliasName, password).toString().isEmpty()
 +          && certificateFile.exists());
 +    } finally {
 +      credentialsStoreFile.deleteOnExit();
 +      keyStoreFile.deleteOnExit();
 +      certificateFile.deleteOnExit();
 +    }
 +  }
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
index aad94a7,0000000..8b26dce
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/AmbariServiceDefinitionTest.java
@@@ -1,352 -1,0 +1,354 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p/>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p/>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.apache.velocity.Template;
 +import org.apache.velocity.VelocityContext;
 +import org.apache.velocity.app.VelocityEngine;
 +import org.apache.velocity.runtime.RuntimeConstants;
 +import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import java.io.File;
 +import java.io.StringWriter;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.Properties;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
 +
 +public class AmbariServiceDefinitionTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( AmbariServiceDefinitionTest.class );
 +  private static Class<?> DAT = AmbariServiceDefinitionTest.class;
 +
 +  private static GatewayTestConfig config;
 +  private static DefaultGatewayServices services;
 +  private static GatewayServer gateway;
 +  private static int gatewayPort;
 +  private static String gatewayUrl;
 +  private static String clusterUrl;
 +  private static String clusterPath;
 +  private static Properties params;
 +  private static TopologyService topos;
 +  private static MockServer mockAmbari;
 +
 +  private static VelocityEngine velocity;
 +  private static VelocityContext context;
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    setupGateway();
 +    String topoStr = TestUtils.merge( DAT, "test-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanupTest() throws Exception {
 +    FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-     FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
++    // Test run should not fail if deleting deployment files is not successful.
++    // Deletion has been already done by TopologyService.
++    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    config = new GatewayTestConfig();
 +    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( config.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    setupMockServers();
 +    startGatewayServer();
 +  }
 +
 +  public static void setupMockServers() throws Exception {
 +    mockAmbari = new MockServer( "AMBARI", true );
 +  }
 +
 +  public static void startGatewayServer() throws Exception {
 +    services = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      services.init( config, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    topos = services.getService(GatewayServices.TOPOLOGY_SERVICE);
 +
 +    gateway = GatewayServer.startGateway( config, services );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    gatewayPort = gateway.getAddresses()[0].getPort();
 +    gatewayUrl = "http://localhost:" + gatewayPort + "/" + config.getGatewayPath();
 +    String topologyPath = "/test-topology";
 +    clusterPath = "/" + config.getGatewayPath() + topologyPath;
 +    clusterUrl = gatewayUrl + topologyPath;
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    params = new Properties();
 +    params.put( "AMBARI_URL", "http://localhost:" + mockAmbari.getPort() );
 +
 +    velocity = new VelocityEngine();
 +    velocity.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
 +    velocity.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
 +    velocity.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
 +    velocity.init();
 +
 +    context = new VelocityContext();
 +    context.put( "cluster_url", clusterUrl );
 +    context.put( "cluster_path", clusterPath );
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void clusters() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/clusters";
 +
 +    mockAmbari.expect()
 +        .method( "GET" )
 +        .pathInfo( "/api/v1/clusters" )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( TestUtils.getResourceStream( DAT, "clusters-response.json" ) )
 +        .contentType( "text/plain" );
 +
 +    String body = given()
 +//        .log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +//        .log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .when().get( serviceUrl ).asString();
 +
 +
 +    String name = TestUtils.getResourceName( this.getClass(), "clusters-response-expected.json" );
 +    Template template = velocity.getTemplate( name );
 +    StringWriter sw = new StringWriter();
 +    template.merge( context, sw );
 +    String expected = sw.toString();
 +
 +    MatcherAssert.assertThat(body, sameJSONAs(expected));
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void historyServer() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/clusters/test/hosts/c6401.ambari.apache.org/host_components/HISTORYSERVER";
 +
 +    mockAmbari.expect()
 +        .method( "GET" )
 +        .pathInfo( "/api/v1/clusters/test/hosts/c6401.ambari.apache.org/host_components/HISTORYSERVER" )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( TestUtils.getResourceStream( DAT, "history-server-response.json" ) )
 +        .contentType( "text/plain" );
 +
 +    String body = given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .when().get( serviceUrl ).asString();
 +
 +
 +    String name = TestUtils.getResourceName( this.getClass(), "history-server-response-expected.json" );
 +    Template template = velocity.getTemplate( name );
 +    StringWriter sw = new StringWriter();
 +    template.merge( context, sw );
 +    String expected = sw.toString();
 +
 +    MatcherAssert.assertThat(body, sameJSONAs(expected));
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void unwiseCharacterRequest() throws Exception {
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/clusters/test/components";
 +
 +    mockAmbari.expect()
 +        .method( "GET" )
 +        .pathInfo( "/api/v1/clusters/test/components" )
 +        .queryParam("ServiceComponentInfo/component_name", "APP_TIMELINE_SERVER|ServiceComponentInfo/category=MASTER")
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( TestUtils.getResourceStream( DAT, "unwise-character-response.json" ) )
 +        .contentType( "text/plain" );
 +    //only assertion here is to make sure the request can be made successfully with the unwise characters present
 +    //in the request url
 +     given()
 +        .auth().preemptive().basic( username, password )
 +        .queryParam("ServiceComponentInfo/component_name", "APP_TIMELINE_SERVER|ServiceComponentInfo/category=MASTER")
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .when().get( serviceUrl ).asString();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void encryptedResponse() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/persist/CLUSTER_CURRENT_STATUS?_=1457977721091";
 +
 +    mockAmbari.expect()
 +        .method( "GET" )
 +        .pathInfo( "/api/v1/persist/CLUSTER_CURRENT_STATUS" )
 +        .queryParam("_","1457977721091")
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( TestUtils.getResourceStream( DAT, "encrypted-response.txt" ) )
 +        .contentType( "text/plain" );
 +
 +    String body = given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .when().get( serviceUrl ).asString();
 +
 +    Assert.assertNotNull(body);
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void postDataWithWrongContentType() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/stacks/HDP/versions/2.3/recommendations";
 +
 +    mockAmbari.expect()
 +        .method( "POST" )
 +        .pathInfo( "/api/v1/stacks/HDP/versions/2.3/recommendations" )
 +        .content( TestUtils.getResourceStream( DAT, "post-data-wrong-type.json" ) )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .contentType( "application/x-www-form-urlencoded" );
 +
 +
 +    String body = given()
 +        .auth().preemptive().basic( username, password )
 +        .content(IOUtils.toByteArray(TestUtils.getResourceStream( DAT, "post-data-wrong-type.json")))
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/x-www-form-urlencoded" )
 +        .when().post( serviceUrl ).asString();
 +
 +    Assert.assertNotNull(body);
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void contextPathInViewsResponse() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    String serviceUrl = clusterUrl + "/ambari/api/v1/views?fields=versions/instances/ViewInstanceInfo,versions/" +
 +        "ViewVersionInfo/label&versions/ViewVersionInfo/system=false&_=1461186937589";
 +
 +    mockAmbari.expect()
 +        .method( "GET" )
 +        .pathInfo( "/api/v1/views" )
 +        .queryParam("_", "1461186937589")
 +        .queryParam("versions/ViewVersionInfo/system", "false")
 +        .queryParam("fields", "versions/instances/ViewInstanceInfo,versions/ViewVersionInfo/label")
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( TestUtils.getResourceStream( DAT, "views-response.json" ) )
 +        .contentType( "text/plain" );
 +
 +    String body = given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .when().get( serviceUrl ).asString();
 +
 +
 +    String name = TestUtils.getResourceName( this.getClass(), "views-response-expected.json" );
 +    Template template = velocity.getTemplate( name );
 +    StringWriter sw = new StringWriter();
 +    template.merge( context, sw );
 +    String expected = sw.toString();
 +
 +    MatcherAssert.assertThat(body, sameJSONAs(expected));
 +    LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
index 4710c37,0000000..810626b
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminFuncTest.java
@@@ -1,176 -1,0 +1,176 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import javax.ws.rs.core.MediaType;
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +public class GatewayAdminFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminFuncTest.class );
 +
 +  //public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    TestUtils.LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    TestUtils.LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    File descriptor = new File( topoDir, "test-cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "KNOX" )
 +        .gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  //@Test
 +  public void waitForManualTesting() throws IOException {
 +    System.out.println( clusterUrl );
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testAdminService() throws ClassNotFoundException {
 +    TestUtils.LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/api/v1/version";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        //.body( is( "{\"hash\":\"unknown\",\"version\":\"unknown\"}" ) )
 +        .when().get( serviceUrl );
 +
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
index 5f828e5,0000000..e5c16be
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAdminTopologyFuncTest.java
@@@ -1,799 -1,0 +1,802 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.io.StringReader;
 +import java.net.URI;
 +import java.net.URISyntaxException;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +import javax.ws.rs.core.MediaType;
 +
- import com.jayway.restassured.http.ContentType;
++import io.restassured.http.ContentType;
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.knox.gateway.topology.Param;
 +import org.apache.knox.gateway.topology.Provider;
 +import org.apache.knox.gateway.topology.Service;
 +import org.apache.knox.gateway.topology.Topology;
 +import org.apache.knox.gateway.util.XmlUtils;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +import org.w3c.dom.Document;
 +import org.xml.sax.InputSource;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.containsString;
 +import static org.hamcrest.CoreMatchers.equalTo;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.not;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.hamcrest.CoreMatchers.nullValue;
 +import static org.hamcrest.xml.HasXPath.hasXPath;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.fail;
 +
 +public class GatewayAdminTopologyFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayAdminTopologyFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway(new GatewayTestConfig());
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +  }
 +
 +  public static void setupGateway(GatewayTestConfig testConfig) throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    File descriptor = new File( topoDir, "admin.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createKnoxTopology().toStream( stream );
 +    stream.close();
 +
 +    File descriptor2 = new File( topoDir, "test-cluster.xml" );
 +    FileOutputStream stream2 = new FileOutputStream( descriptor2 );
 +    createNormalTopology().toStream( stream2 );
 +    stream2.close();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/admin";
 +  }
 +
 +  private static XMLTag createNormalTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "webappsec" )
 +        .addTag( "name" ).addText( "WebAppSec" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "csrf.enabled" )
 +        .addTag( "value" ).addText( "true" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "AclsAuthz" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "webhdfs-acl" )
 +        .addTag( "value" ).addText( "hdfs;*;*" ).gotoParent()
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "WEBHDFS" )
 +        .addTag( "url" ).addText( "http://localhost:50070/webhdfs/v1" ).gotoParent()
 +        .gotoRoot();
 +//     System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  private static XMLTag createKnoxTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "name" ).addText( "AclsAuthz" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag("param")
 +        .addTag("name").addText("knox.acl")
 +        .addTag("value").addText("admin;*;*").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "KNOX" )
 +        .gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  //@Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testTopologyCollection() throws ClassNotFoundException {
 +    LOG_ENTER();
 +
 +    String username = "admin";
 +    String password = "admin-password";
 +    String serviceUrl = clusterUrl + "/api/v1/topologies";
 +    String href1 = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .contentType(MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body("topologies.topology[0].name", not(nullValue()))
 +        .body("topologies.topology[1].name", not(nullValue()))
 +        .body("topologies.topology[0].uri", not(nullValue()))
 +        .body("topologies.topology[1].uri", not(nullValue()))
 +        .body("topologies.topology[0].href", not(nullValue()))
 +        .body("topologies.topology[1].href", not(nullValue()))
 +        .body("topologies.topology[0].timestamp", not(nullValue()))
 +        .body("topologies.topology[1].timestamp", not(nullValue()))
 +        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology.href[1]");
 +
 +       given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .expect()
 +        //.log().all()
 +        .body("topologies.topology.href[1]", equalTo(href1))
 +        .statusCode(HttpStatus.SC_OK)
 +        .when().get(serviceUrl);
 +
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(MediaType.APPLICATION_XML)
-         .get(serviceUrl);
++        .when().get(serviceUrl);
 +
 +
 +    given().auth().preemptive().basic(username, password)
++        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType("application/json")
 +        .body("topology.name", equalTo("test-cluster"))
 +        .when().get(href1);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testTopologyObject() throws ClassNotFoundException {
 +    LOG_ENTER();
 +
 +    String username = "admin";
 +    String password = "admin-password";
 +    String serviceUrl = clusterUrl + "/api/v1/topologies";
 +    String hrefJson = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
 +
 +    String timestampJson = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType("application/json")
 +        .when().get(serviceUrl).andReturn()
 +        .getBody().path("topologies.topology[1].timestamp");
 +
 +        given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body("topology.name", equalTo("test-cluster"))
 +        .body("topology.timestamp", equalTo(Long.parseLong(timestampJson)))
 +        .when()
 +        .get(hrefJson);
 +
 +
 +    String hrefXml = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .when().get(serviceUrl).thenReturn().getBody().path("topologies.topology[1].href");
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .when()
 +        .get(hrefXml);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testPositiveAuthorization() throws ClassNotFoundException{
 +    LOG_ENTER();
 +
 +    String adminUser = "admin";
 +    String adminPass = "admin-password";
 +    String url = clusterUrl + "/api/v1/topologies";
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic(adminUser, adminPass)
++        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(ContentType.JSON)
 +        .body("topologies.topology[0].name", not(nullValue()))
 +        .body("topologies.topology[1].name", not(nullValue()))
 +        .body("topologies.topology[0].uri", not(nullValue()))
 +        .body("topologies.topology[1].uri", not(nullValue()))
 +        .body("topologies.topology[0].href", not(nullValue()))
 +        .body("topologies.topology[1].href", not(nullValue()))
 +        .body("topologies.topology[0].timestamp", not(nullValue()))
 +        .body("topologies.topology[1].timestamp", not(nullValue()))
-         .get(url);
++        .when().get(url);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testNegativeAuthorization() throws ClassNotFoundException{
 +    LOG_ENTER();
 +
 +    String guestUser = "guest";
 +    String guestPass = "guest-password";
 +    String url = clusterUrl + "/api/v1/topologies";
 +
 +    given()
 +        //.log().all()
 +        .auth().basic(guestUser, guestPass)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_FORBIDDEN)
-         .get(url);
++        .when().get(url);
 +
 +    LOG_EXIT();
 +  }
 +
 +  private Topology createTestTopology(){
 +    Topology topology = new Topology();
 +    topology.setName("test-topology");
 +
 +    try {
 +      topology.setUri(new URI(gatewayUrl + "/" + topology.getName()));
 +    } catch (URISyntaxException ex) {
 +      assertThat(topology.getUri(), not(nullValue()));
 +    }
 +
 +    Provider identityProvider = new Provider();
 +    identityProvider.setName("Default");
 +    identityProvider.setRole("identity-assertion");
 +    identityProvider.setEnabled(true);
 +
 +    Provider authenticationProvider = new Provider();
 +    authenticationProvider.setName("ShiroProvider");
 +    authenticationProvider.setRole("authentication");
 +    authenticationProvider.setEnabled(true);
 +
 +    Param ldapMain = new Param();
 +    ldapMain.setName("main.ldapRealm");
 +    ldapMain.setValue("org.apache.knox.gateway.shirorealm.KnoxLdapRealm");
 +
 +    Param ldapGroupContextFactory = new Param();
 +    ldapGroupContextFactory.setName("main.ldapGroupContextFactory");
 +    ldapGroupContextFactory.setValue("org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory");
 +
 +    Param ldapRealmContext = new Param();
 +    ldapRealmContext.setName("main.ldapRealm.contextFactory");
 +    ldapRealmContext.setValue("$ldapGroupContextFactory");
 +
 +    Param ldapURL = new Param();
 +    ldapURL.setName("main.ldapRealm.contextFactory.url");
 +    ldapURL.setValue(driver.getLdapUrl());
 +
 +    Param ldapUserTemplate = new Param();
 +    ldapUserTemplate.setName("main.ldapRealm.userDnTemplate");
 +    ldapUserTemplate.setValue("uid={0},ou=people,dc=hadoop,dc=apache,dc=org");
 +
 +    Param authcBasic = new Param();
 +    authcBasic.setName("urls./**");
 +    authcBasic.setValue("authcBasic");
 +
 +    authenticationProvider.addParam(ldapGroupContextFactory);
 +    authenticationProvider.addParam(ldapMain);
 +    authenticationProvider.addParam(ldapRealmContext);
 +    authenticationProvider.addParam(ldapURL);
 +    authenticationProvider.addParam(ldapUserTemplate);
 +    authenticationProvider.addParam(authcBasic);
 +
 +    Service testService = new Service();
 +    testService.setRole("test-service-role");
 +
 +    topology.addProvider(authenticationProvider);
 +    topology.addProvider(identityProvider);
 +    topology.addService(testService);
 +    topology.setTimestamp(System.nanoTime());
 +
 +    return topology;
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testDeployTopology() throws Exception {
 +    LOG_ENTER();
 +
 +    Topology testTopology = createTestTopology();
 +
 +    String user = "guest";
 +    String password = "guest-password";
 +
 +    String url = gatewayUrl + "/" + testTopology.getName() + "/test-service-path/test-service-resource";
 +
 +    GatewayServices srvs = GatewayServer.getGatewayServices();
 +
 +    TopologyService ts = srvs.getService(GatewayServices.TOPOLOGY_SERVICE);
 +    try {
 +      ts.stopMonitor();
 +
 +      assertThat( testTopology, not( nullValue() ) );
 +      assertThat( testTopology.getName(), is( "test-topology" ) );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( "admin", "admin-password" ).header( "Accept", MediaType.APPLICATION_JSON ).expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK ).body( containsString( "ServerVersion" ) ).when().get( gatewayUrl + "/admin/api/v1/version" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( user, password ).expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
 +
 +      ts.deployTopology( testTopology );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( user, password ).expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK ).contentType( "text/plain" ).body( is( "test-service-response" ) ).when().get( url ).getBody();
 +
 +      ts.deleteTopology( testTopology );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( user, password ).expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_NOT_FOUND ).when().get( url );
 +    } finally {
 +      ts.startMonitor();
 +    }
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testDeleteTopology() throws ClassNotFoundException {
 +    LOG_ENTER();
 +
 +    Topology test = createTestTopology();
 +
 +    String username = "admin";
 +    String password = "admin-password";
 +    String url = clusterUrl + "/api/v1/topologies/" + test.getName();
 +
 +    GatewayServices gs = GatewayServer.getGatewayServices();
 +
 +    TopologyService ts = gs.getService(GatewayServices.TOPOLOGY_SERVICE);
 +
 +    ts.deployTopology(test);
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
++        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(MediaType.APPLICATION_JSON)
-         .get(url);
++        .when().get(url);
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(MediaType.APPLICATION_JSON)
-         .delete(url);
++        .when().delete(url);
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .expect()
 +        //.log().all()
 +        .statusCode(HttpStatus.SC_NO_CONTENT)
-         .get(url);
++        .when().get(url);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testPutTopology() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "admin";
 +    String password = "admin-password";
 +    String url = clusterUrl + "/api/v1/topologies/test-put";
 +
 +    String JsonPut =
 +        given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .get(clusterUrl + "/api/v1/topologies/test-cluster")
 +        .getBody().asString();
 +
 +    String XML = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .contentType(MediaType.APPLICATION_JSON)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .body(JsonPut)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        //.log().all()
-         .put(url).getBody().asString();
++        .when().put(url).getBody().asString();
 +
 +    InputSource source = new InputSource( new StringReader( XML ) );
 +    Document doc = XmlUtils.readXml( source );
 +
 +    assertThat( doc, hasXPath( "/topology/gateway/provider[1]/name", containsString( "WebAppSec" ) ) );
 +    assertThat( doc, hasXPath( "/topology/gateway/provider[1]/param/name", containsString( "csrf.enabled" ) ) );
 +
 +    given()
 +            .auth().preemptive().basic(username, password)
 +            .header("Accept", MediaType.APPLICATION_XML)
 +            .expect()
 +            .statusCode(HttpStatus.SC_OK)
 +            .body(equalTo(XML))
-             .get(url)
++            .when().get(url)
 +            .getBody().asString();
 +
 +    String XmlPut =
 +        given()
 +            .auth().preemptive().basic(username, password)
 +            .header("Accept", MediaType.APPLICATION_XML)
 +            .get(clusterUrl + "/api/v1/topologies/test-cluster")
 +            .getBody().asString();
 +
 +    String JSON = given()
 +        //.log().all()
 +        .auth().preemptive().basic(username, password)
 +        .contentType(MediaType.APPLICATION_XML)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .body(XmlPut)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +            //.log().all()
-         .put(url).getBody().asString();
++        .when().put(url).getBody().asString();
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_JSON)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(equalTo(JSON))
-         .get(url)
++        .when().get(url)
 +        .getBody().asString();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testXForwardedHeaders() {
 +    LOG_ENTER();
 +
 +    String username = "admin";
 +    String password = "admin-password";
 +    String url = clusterUrl + "/api/v1/topologies";
 +
 +//    X-Forward header values
 +    String port = String.valueOf(777);
 +    String server = "myserver";
 +    String host = server + ":" + port;
 +    String proto = "protocol";
 +    String context = "/mycontext";
 +    String newUrl = proto + "://" + host + context;
 +//    String port = String.valueOf(gateway.getAddresses()[0].getPort());
 +
 +//     Case 1: Add in all x-forward headers (host, port, server, context, proto)
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .header("X-Forwarded-Host", host )
 +        .header("X-Forwarded-Port", port )
 +        .header("X-Forwarded-Server", server )
 +        .header("X-Forwarded-Context", context)
 +        .header("X-Forwarded-Proto", proto)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(newUrl))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +
 +//     Case 2: add in x-forward headers (host, server, proto, context)
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .header("X-Forwarded-Host", host )
 +        .header("X-Forwarded-Server", server )
 +        .header("X-Forwarded-Context", context )
 +        .header("X-Forwarded-Proto", proto )
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(server))
 +        .body(containsString(context))
 +        .body(containsString(proto))
 +        .body(containsString(host))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +//     Case 3: add in x-forward headers (host, proto, port, context)
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .header("X-Forwarded-Host", host )
 +        .header("X-Forwarded-Port", port )
 +        .header("X-Forwarded-Context", context )
 +        .header("X-Forwarded-Proto", proto)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(host))
 +        .body(containsString(port))
 +        .body(containsString(context))
 +        .body(containsString(proto))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +//     Case 4: add in x-forward headers (host, proto, port, context) no port in host.
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .header("X-Forwarded-Host", server)
 +        .header("X-Forwarded-Port", port)
 +        .header("X-Forwarded-Context", context)
 +        .header("X-Forwarded-Proto", proto)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(server))
 +        .body(containsString(port))
 +        .body(containsString(context))
 +        .body(containsString(proto))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +//     Case 5: add in x-forward headers (host, port)
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .header("X-Forwarded-Host", host )
 +        .header("X-Forwarded-Port", port )
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(host))
 +        .body(containsString(port))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +//     Case 6: Normal Request
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(url))
 +        .body(containsString("test-cluster"))
 +        .body(containsString("admin"))
-         .get(url);
++        .when().get(url);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testGatewayPathChange() throws Exception {
 +    LOG_ENTER();
 +    String username = "admin";
 +    String password = "admin-password";
 +    String url = clusterUrl + "/api/v1/topologies";
 +
 +//     Case 1: Normal Request (No Change in gateway.path). Ensure HTTP OK resp + valid URL.
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.APPLICATION_XML)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .body(containsString(url + "/test-cluster"))
-         .get(url);
++        .when().get(url);
 +
 +
 +//     Case 2: Change gateway.path to another String. Ensure HTTP OK resp + valid URL.
 +   try {
 +     gateway.stop();
 +
 +     GatewayTestConfig conf = new GatewayTestConfig();
 +     conf.setGatewayPath("new-gateway-path");
 +     setupGateway(conf);
 +
 +     String newUrl = clusterUrl + "/api/v1/topologies";
 +
 +     given()
 +         .auth().preemptive().basic(username, password)
 +         .header("Accept", MediaType.APPLICATION_XML)
 +         .expect()
 +         .statusCode(HttpStatus.SC_OK)
 +         .body(containsString(newUrl + "/test-cluster"))
-          .get(newUrl);
++         .when().get(newUrl);
 +   } catch (Exception e) {
 +     fail(e.getMessage());
 +   }
 +   finally {
 +     // Restart the gateway with old settings.
 +     gateway.stop();
 +     setupGateway(new GatewayTestConfig());
 +   }
 +
 +    LOG_EXIT();
 +  }
 +
 +  private static final String CLASS = GatewayAdminTopologyFuncTest.class.getCanonicalName();
 +
 +}
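
A note on the recurring change in this merge: the REST Assured imports move from com.jayway.restassured to io.restassured, and every request is now routed through .when() before the HTTP verb (".when().get(url)" instead of ".get(url)"). Below is a minimal, self-contained sketch of the resulting call pattern; the URL and credentials are placeholders for illustration only (not taken from the commit), and it assumes io.restassured and httpclient are on the classpath and that a gateway is already listening at the placeholder address.

    import static io.restassured.RestAssured.given;

    import org.apache.http.HttpStatus;

    public class RestAssuredPatternSketch {
      public static void main( String[] args ) {
        // Placeholder endpoint and credentials; any reachable Knox admin API would do.
        String serviceUrl = "http://localhost:8443/gateway/admin/api/v1/topologies";
        given()
            .auth().preemptive().basic( "admin", "admin-password" )
            .header( "Accept", "application/json" )
            .expect()
            .statusCode( HttpStatus.SC_OK )
            // Route the call through .when() before the verb, matching the io.restassured usage in the merged tests.
            .when().get( serviceUrl );
      }
    }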


[08/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
index ae4fc74,0000000..859d34d
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayAppFuncTest.java
@@@ -1,670 -1,0 +1,672 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.File;
 +import java.net.URL;
 +import java.nio.charset.Charset;
 +import java.util.ArrayList;
 +import java.util.Collection;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Properties;
 +import java.util.UUID;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.equalTo;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.hamcrest.Matchers.arrayWithSize;
 +import static org.hamcrest.Matchers.hasItemInArray;
 +import static org.hamcrest.core.Is.is;
 +import static org.hamcrest.core.IsNot.not;
 +import static org.junit.Assert.assertThat;
 +import static org.xmlmatchers.transform.XmlConverters.the;
 +import static org.xmlmatchers.xpath.HasXPath.hasXPath;
 +import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
 +
 +public class GatewayAppFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayAppFuncTest.class );
 +  private static Class<?> DAT = GatewayAppFuncTest.class;
 +
 +  private static Enumeration<Appender> appenders;
 +  private static GatewayTestConfig config;
 +  private static DefaultGatewayServices services;
 +  private static GatewayServer gateway;
 +  private static int gatewayPort;
 +  private static String gatewayUrl;
 +  private static String clusterUrl;
 +  private static Properties params;
 +  private static TopologyService topos;
 +  private static MockServer mockWebHdfs;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanupTest() throws Exception {
 +    FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-     FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
++    // Test run should not fail if deleting deployment files is not successful.
++    // Deletion has already been done by TopologyService.
++    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    config = new GatewayTestConfig();
 +    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    URL svcsFileUrl = TestUtils.getResourceUrl( DAT, "test-svcs/readme.txt" );
 +    File svcsFile = new File( svcsFileUrl.getFile() );
 +    File svcsDir = svcsFile.getParentFile();
 +    config.setGatewayServicesDir( svcsDir.getAbsolutePath() );
 +
 +    URL appsFileUrl = TestUtils.getResourceUrl( DAT, "test-apps/readme.txt" );
 +    File appsFile = new File( appsFileUrl.getFile() );
 +    File appsDir = appsFile.getParentFile();
 +    config.setGatewayApplicationsDir( appsDir.getAbsolutePath() );
 +
 +    File topoDir = new File( config.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +
 +    setupMockServers();
 +    startGatewayServer();
 +  }
 +
 +  public static void setupMockServers() throws Exception {
 +    mockWebHdfs = new MockServer( "WEBHDFS", true );
 +  }
 +
 +  public static void startGatewayServer() throws Exception {
 +    services = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      services.init( config, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    topos = services.getService(GatewayServices.TOPOLOGY_SERVICE);
 +
 +    gateway = GatewayServer.startGateway( config, services );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    gatewayPort = gateway.getAddresses()[0].getPort();
 +    gatewayUrl = "http://localhost:" + gatewayPort + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-topology";
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    params = new Properties();
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +    params.put( "WEBHDFS_URL", "http://localhost:" + mockWebHdfs.getPort() );
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testSimpleStaticHelloAppDeployUndeploy() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-static-hello-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/static-hello-app-path/index.html";
 +    String body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/html" )
 +        .when().get( serviceUrl ).asString();
 +    assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
 +
 +    serviceUrl = clusterUrl + "/static-hello-app-path/";
 +    body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/html" )
 +        .when().get( serviceUrl ).asString();
 +    assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
 +
 +    serviceUrl = clusterUrl + "/static-hello-app-path";
 +    body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/html" )
 +        .when().get( serviceUrl ).asString();
 +    assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
 +
 +    assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
 +    topos.reloadTopologies();
 +
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( serviceUrl );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testSimpleDynamicAppDeployUndeploy() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +        .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +    assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
 +    topos.reloadTopologies();
 +
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when()
 +        .get( clusterUrl + "/dynamic-app-path" );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testNakedAppDeploy() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-naked-app-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( is( gatewayUrl + "/test-topology/dynamic-app/?null" ) )
 +        .when().get( gatewayUrl + "/test-topology/dynamic-app" );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testDefaultAppName() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-default-app-name-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( is( clusterUrl + "/dynamic-app/?null" ) )
 +        .when().get( clusterUrl + "/dynamic-app" );
 +
 +    assertThat( "Failed to delete test topology file", FileUtils.deleteQuietly( topoFile ), is(true) );
 +    topos.reloadTopologies();
 +
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when()
 +        .get( clusterUrl + "/dynamic-app" );
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    assertThat( deployDir.listFiles(), is(arrayWithSize(0)) );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testMultiApps() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-multi-apps-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    String body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/html" )
 +        .when().get( clusterUrl + "/static-hello-app-path/index.html" ).asString();
 +    assertThat( the(body), hasXPath( "/html/head/title/text()", equalTo("Static Hello Application") ) );
 +
 +    body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .contentType( "" )
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().get( clusterUrl + "/static-json-app/one.json" ).asString();
 +    assertThat( body, sameJSONAs( "{'test-name-one':'test-value-one'}" ) );
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +        .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +    body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .contentType( "application/xml" )
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().get( clusterUrl + "/test.xml" ).asString();
 +    assertThat( the(body), hasXPath( "/test" ) );
 +
 +    assertThat( FileUtils.deleteQuietly( topoFile ), is(true) );
 +    topos.reloadTopologies();
 +
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/static-hello-app-path/index.html" );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/static-json-app/one.json" );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/dynamic-app-path" );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/test.xml" );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testServicesAndApplications() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-svcs-and-apps-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    mockWebHdfs.expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1/" )
 +        .queryParam( "op", "GETHOMEDIRECTORY" )
 +        .queryParam( "user.name", "guest" )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( "{\"path\":\"/users/guest\"}", Charset.forName("UTF-8") )
 +        .contentType( "application/json" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .queryParam( "op", "GETHOMEDIRECTORY" )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/json" )
 +        .body( "path", is( "/users/guest") )
 +        .when().get( clusterUrl + "/webhdfs/v1" );
 +    assertThat( mockWebHdfs.isEmpty(), is(true) );
 +
 +    String body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/xml" )
 +        .when().get( clusterUrl + "/static-xml-app/test.xml" ).asString();
 +    assertThat( the(body), hasXPath( "test" ) );
 +
 +    body = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .contentType( "" )
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().get( clusterUrl + "/app-two/one.json" ).asString();
 +    assertThat( body, sameJSONAs( "{'test-name-one':'test-value-one'}" ) );
 +
 +    assertThat( FileUtils.deleteQuietly( topoFile ), is(true) );
 +    topos.reloadTopologies();
 +
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/app-one/index.html" );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/app-two/one.json" );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        .statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( clusterUrl + "/test.xml" );
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    assertThat( deployDir.listFiles(), is(arrayWithSize(0)) );
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test//( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testDeploymentCleanup() throws Exception {
 +    LOG_ENTER();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +
 +    int oldVersionLimit = config.getGatewayDeploymentsBackupVersionLimit();
 +
 +    try {
 +      gateway.stop();
 +      config.setGatewayDeploymentsBackupVersionLimit( 1 );
 +      startGatewayServer();
 +
 +      String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
 +      File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +      FileUtils.writeStringToFile( topoFile, topoStr );
 +      topos.reloadTopologies();
 +
 +      File deployDir = new File( config.getGatewayDeploymentDir() );
 +      String[] topoDirs1 = deployDir.list();
 +      assertThat( topoDirs1, is(arrayWithSize(1)) );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +      TestUtils.waitUntilNextSecond();
 +      FileUtils.touch( topoFile );
 +
 +      topos.reloadTopologies();
 +      String[] topoDirs2 = deployDir.list();
 +      assertThat( topoDirs2, is(arrayWithSize(2)) );
 +      assertThat( topoDirs2, hasItemInArray(topoDirs1[0]) );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +      TestUtils.waitUntilNextSecond();
 +      FileUtils.touch( topoFile );
 +      topos.reloadTopologies();
 +
 +      String[] topoDirs3 = deployDir.list();
 +      assertThat( topoDirs3, is(arrayWithSize(2)) );
 +      assertThat( topoDirs3, not(hasItemInArray(topoDirs1[0])) );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +    } finally {
 +      gateway.stop();
 +      config.setGatewayDeploymentsBackupVersionLimit( oldVersionLimit );
 +      startGatewayServer();
 +    }
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testDefaultTopology() throws Exception {
 +    LOG_ENTER();
 +
 +    try {
 +      gateway.stop();
 +      config.setGatewayDeploymentsBackupVersionLimit( 1 );
 +      startGatewayServer();
 +
 +      String topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
 +      File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +      FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +      topos.reloadTopologies();
 +
 +      File deployDir = new File( config.getGatewayDeploymentDir() );
 +      String[] topoDirs = deployDir.list();
 +      assertThat( topoDirs, is(arrayWithSize(1)) );
 +
 +      String username = "guest";
 +      String password = "guest-password";
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +      topoStr = TestUtils.merge( DAT, "test-dynamic-app-topology.xml", params );
 +      topoFile = new File( config.getGatewayTopologyDir(), "test-topology-2.xml" );
 +      FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +      topos.reloadTopologies();
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( gatewayUrl + "/test-topology" + "/dynamic-app-path/?null" ) )
 +          .when().get( gatewayUrl + "/test-topology/dynamic-app-path" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( gatewayUrl + "/test-topology-2" + "/dynamic-app-path/?null" ) )
 +          .when().get( gatewayUrl + "/test-topology-2/dynamic-app-path" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_NOT_FOUND )
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) );
 +
 +      gateway.stop();
 +      config.setDefaultTopologyName( "test-topology" );
 +      startGatewayServer();
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( gatewayUrl + "/test-topology" + "/dynamic-app-path/?null" ) )
 +          .when().get( gatewayUrl + "/test-topology/dynamic-app-path" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_OK )
 +          .body( is( gatewayUrl + "/test-topology-2" + "/dynamic-app-path/?null" ) )
 +          .when().get( gatewayUrl + "/test-topology-2/dynamic-app-path" );
 +
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( username, password )
 +          .expect()
 +          //.log().all()
 +          .body( is( clusterUrl + "/dynamic-app-path/?null" ) )
 +          .when().get( clusterUrl + "/dynamic-app-path" );
 +
 +    } finally {
 +      gateway.stop();
 +      config.setDefaultTopologyName( null );
 +      startGatewayServer();
 +    }
 +
 +    LOG_EXIT();
 +  }
 +
 +  public static Collection<String> toNames( File[] files ) {
 +    List<String> names = new ArrayList<String>( files.length );
 +    for( File file : files ) {
 +      names.add( file.getAbsolutePath() );
 +    }
 +    return names;
 +
 +  }
 +
 +}
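
A side note on the @After cleanup change in this file: FileUtils.cleanDirectory propagates an exception when a file in the deployment directory cannot be removed, while FileUtils.deleteQuietly swallows any failure and simply reports a boolean, which is why the test run no longer fails when the deployment files have already been removed by TopologyService. A small sketch of that difference follows; the class name and directory path are made up for illustration and are not part of the commit.

    import java.io.File;
    import java.io.IOException;

    import org.apache.commons.io.FileUtils;

    public class CleanupSketch {
      public static void main( String[] args ) {
        File deployDir = new File( "target/example-deployments" ); // made-up path
        deployDir.mkdirs();

        try {
          // Empties the directory and throws an IOException if anything cannot be deleted.
          FileUtils.cleanDirectory( deployDir );
        } catch ( IOException e ) {
          System.out.println( "cleanDirectory failed: " + e.getMessage() );
        }

        // Deletes the directory and any remaining content without ever throwing; failure just returns false.
        boolean deleted = FileUtils.deleteQuietly( deployDir );
        System.out.println( "deleteQuietly returned " + deleted );
      }
    }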


[05/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
index 65d00a2,0000000..db1dbe0
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingDisableFeatureTest.java
@@@ -1,252 -1,0 +1,252 @@@
 +package org.apache.knox.gateway;
 +
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.junit.After;
 +import org.junit.Before;
 +import org.junit.Rule;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.junit.rules.ExpectedException;
 +
 +import java.io.IOException;
 +import java.net.ConnectException;
 +import java.util.concurrent.ConcurrentHashMap;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +
 +/**
 + * Test that the Gateway Topology Port Mapping feature behaves correctly when it is disabled.
 + *
 + */
 +@Category(ReleaseTest.class)
 +public class GatewayPortMappingDisableFeatureTest {
 +
 +  // Specifies if the test requests should go through the gateway or directly to the services.
 +  // This is frequently used to verify the behavior of the test both with and without the gateway.
 +  private static final boolean USE_GATEWAY = true;
 +
 +  // Specifies if the test requests should be sent to mock services or the real services.
 +  // This is frequently used to verify the behavior of the test both with and without mock services.
 +  private static final boolean USE_MOCK_SERVICES = true;
 +
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  private static MockServer masterServer;
 +
 +  private int eeriePort;
 +
 +  @Rule
 +  public ExpectedException exception = ExpectedException.none();
 +
 +
 +  public GatewayPortMappingDisableFeatureTest() {
 +    super();
 +  }
 +
 +  /**
 +   * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +   * registry of sorts for all of the services that will be used by the test methods.
 +   * The createTopology method is used to create the topology file that would normally be read from disk.
 +   * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +   * <p/>
 +   * This would normally be done once for this suite, but the failure tests start affecting each other depending
 +   * on the state of the last 'active' URL.
 +   *
 +   * @throws Exception Thrown if any failure occurs.
 +   */
 +  @Before
 +  public void setup() throws Exception {
 +    LOG_ENTER();
 +
 +    eeriePort = getAvailablePort(1240, 49151);
 +
 +    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
 +    topologyPortMapping.put("eerie", eeriePort);
 +
 +    masterServer = new MockServer("master", true);
 +    GatewayTestConfig config = new GatewayTestConfig();
 +    config.setGatewayPath("gateway");
 +    config.setTopologyPortMapping(topologyPortMapping);
 +    // disable the feature
 +    config.setGatewayPortMappingEnabled(false);
 +
 +    driver.setResourceBase(WebHdfsHaFuncTest.class);
 +    driver.setupLdap(0);
 +
 +    driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
 +
 +    driver.setupGateway(config, "eerie", createTopology("WEBHDFS"), USE_GATEWAY);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanup() throws Exception {
 +    LOG_ENTER();
 +    driver.cleanup();
 +    driver.reset();
 +    masterServer.reset();
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the standard case
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicListOperation() throws IOException {
 +    LOG_ENTER();
 +    test(driver.getUrl("WEBHDFS") );
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the multi port fail scenario when the feature is disabled.
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testMultiPortFailOperation() throws IOException {
 +    LOG_ENTER();
 +    exception.expect(ConnectException.class);
 +    exception.expectMessage("Connection refused");
 +
 +    test("http://localhost:" + eeriePort + "/webhdfs" );
 +    LOG_EXIT();
 +  }
 +
 +
 +  private void test (final String url) throws IOException {
 +    String password = "hdfs-password";
 +    String username = "hdfs";
 +
 +    masterServer.expect()
 +        .method("GET")
 +        .pathInfo("/webhdfs/v1/")
 +        .queryParam("op", "LISTSTATUS")
 +        .queryParam("user.name", username)
 +        .respond()
 +        .status(HttpStatus.SC_OK)
 +        .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +        .contentType("application/json");
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam("op", "LISTSTATUS")
 +        .expect()
 +        .log().ifError()
 +        .statusCode(HttpStatus.SC_OK)
 +        .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +        .when().get(url + "/v1/");
 +    masterServer.isEmpty();
 +  }
 +
 +
 +  /**
 +   * Creates a topology that is deployed to the gateway instance for the test suite.
 +   * Note that this topology is shared by all of the test methods in this suite.
 +   *
 +   * @return A populated XML structure for a topology file.
 +   */
 +  private static XMLTag createTopology(final String role) {
 +    XMLTag xml = XMLDoc.newDocument(true)
 +        .addRoot("topology")
 +        .addTag("gateway")
 +        .addTag("provider")
 +        .addTag("role").addText("webappsec")
 +        .addTag("name").addText("WebAppSec")
 +        .addTag("enabled").addText("true")
 +        .addTag("param")
 +        .addTag("name").addText("csrf.enabled")
 +        .addTag("value").addText("true").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("authentication")
 +        .addTag("name").addText("ShiroProvider")
 +        .addTag("enabled").addText("true")
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm")
 +        .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.userDnTemplate")
 +        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.url")
 +        .addTag("value").addText(driver.getLdapUrl()).gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
 +        .addTag("value").addText("simple").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("urls./**")
 +        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("identity-assertion")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("Default").gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("authorization")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("AclsAuthz").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("webhdfs-acl")
 +        .addTag("value").addText("hdfs;*;*").gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("ha")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("HaProvider")
 +        .addTag("param")
 +        .addTag("name").addText("WEBHDFS")
 +        .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
 +        .gotoRoot()
 +        .addTag("service")
 +        .addTag("role").addText(role)
 +        .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  /**
 +   * Finds the next available port in the given range.
 +   *
 +   * @param min The lower bound of the port range (inclusive).
 +   * @param max The upper bound of the port range (inclusive).
 +   * @return An available port, or -1 if none is free in the range.
 +   */
 +  public static int getAvailablePort(final int min, final int max) {
 +
 +    for (int i = min; i <= max; i++) {
 +
 +      if (!GatewayServer.isPortInUse(i)) {
 +        return i;
 +      }
 +    }
 +    // No available port found in the given range.
 +    return -1;
 +  }
 +
 +}
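
A note on the getAvailablePort(min, max) helper above: it linearly scans a fixed range with
GatewayServer.isPortInUse() and falls back to -1. Purely as a hedged sketch (not part of this
commit; the class and method names below are illustrative only), an alternative is to let the
OS hand out a free ephemeral port by binding a ServerSocket to port 0:

    import java.io.IOException;
    import java.net.ServerSocket;

    public final class FreePortFinder {
      private FreePortFinder() {}

      // Binding to port 0 makes the OS pick a currently free port; read it back, then release it.
      // There is a small race window between close() and the moment the caller binds the port.
      public static int findFreePort() throws IOException {
        try (ServerSocket socket = new ServerSocket(0)) {
          return socket.getLocalPort();
        }
      }
    }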

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
index 2ee8bb9,0000000..ee9e802
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFailTest.java
@@@ -1,150 -1,0 +1,150 @@@
 +package org.apache.knox.gateway;
 +
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +
 +import java.io.IOException;
 +import java.util.concurrent.ConcurrentHashMap;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +
 +/**
 + * Test the fail cases for the Port Mapping Feature
 + */
 +@Category(ReleaseTest.class)
 +public class GatewayPortMappingFailTest {
 +
 +  // Specifies if the test requests should go through the gateway or directly to the services.
 +  // This is frequently used to verify the behavior of the test both with and without the gateway.
 +  private static final boolean USE_GATEWAY = true;
 +
 +  // Specifies if the test requests should be sent to mock services or the real services.
 +  // This is frequently used to verify the behavior of the test both with and without mock services.
 +  private static final boolean USE_MOCK_SERVICES = true;
 +
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  private static MockServer masterServer;
 +
 +  private static int eeriePort;
 +
 +  /**
 +   * Create an instance
 +   */
 +  public GatewayPortMappingFailTest() {
 +    super();
 +  }
 +
 +  /**
 +   * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +   * registry of sorts for all of the services that will be used by the test methods.
 +   * The createTopology method is used to create the topology file that would normally be read from disk.
 +   * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +   * <p/>
 +   * This would normally be done once for this suite, but the failure tests start affecting each other depending
 +   * on the state of the last 'active' URL.
 +   *
 +   * @throws Exception Thrown if any failure occurs.
 +   */
 +  @BeforeClass
 +  public static void setup() throws Exception {
 +    LOG_ENTER();
 +
 +    eeriePort = GatewayPortMappingFuncTest.getAvailablePort(1240, 49151);
 +
 +    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
 +    topologyPortMapping.put("eerie", eeriePort);
 +
 +    masterServer = new MockServer("master", true);
 +    GatewayTestConfig config = new GatewayTestConfig();
 +    config.setGatewayPath("gateway");
 +    config.setTopologyPortMapping(topologyPortMapping);
 +
 +    driver.setResourceBase(WebHdfsHaFuncTest.class);
 +    driver.setupLdap(0);
 +
 +    driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
 +
 +    driver.setupGateway(config, "eerie", GatewayPortMappingFuncTest.createTopology("WEBHDFS", driver.getLdapUrl(), masterServer.getPort()), USE_GATEWAY);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanup() throws Exception {
 +    LOG_ENTER();
 +    driver.cleanup();
 +    driver.reset();
 +    masterServer.reset();
 +    LOG_EXIT();
 +  }
 +
 +
 +  /**
 +   * Fail when trying to use this feature on the standard port.
 +   * Here the Default Topology Feature is not enabled.
 +   *
 +   * http://localhost:{gatewayPort}/webhdfs/v1
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testMultiPortOperationFail() throws IOException {
 +    LOG_ENTER();
 +    final String url = "http://localhost:" + driver.getGatewayPort() + "/webhdfs" ;
 +
 +    String password = "hdfs-password";
 +    String username = "hdfs";
 +
 +    masterServer.expect()
 +        .method("GET")
 +        .pathInfo("/webhdfs/v1/")
 +        .queryParam("op", "LISTSTATUS")
 +        .queryParam("user.name", username)
 +        .respond()
 +        .status(HttpStatus.SC_OK)
 +        .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +        .contentType("application/json");
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam("op", "LISTSTATUS")
 +        .expect()
 +        //.log().ifError()
 +        .statusCode(HttpStatus.SC_NOT_FOUND)
 +        //.content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +        .when().get(url + "/v1/");
 +    masterServer.isEmpty();
 +
 +    LOG_EXIT();
 +  }
 +
 +
 +}
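
The import changes in this file (and in the other tests in this commit) move the suites from
com.jayway.restassured to io.restassured while keeping the legacy given()/expect() chain.
Purely as a hedged sketch, not a change made by this commit, the same assertion from
testMultiPortOperationFail() could also be written in the given/when/then style that
io.restassured supports (url, username and password as defined in that test):

    given()
        .auth().preemptive().basic(username, password)
        .header("X-XSRF-Header", "jksdhfkhdsf")
        .queryParam("op", "LISTSTATUS")
    .when()
        .get(url + "/v1/")
    .then()
        // The request hits the standard gateway port without the gateway path,
        // so with port mapping alone it is expected to be rejected.
        .statusCode(HttpStatus.SC_NOT_FOUND);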

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
index 78ff451,0000000..b597f41
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayPortMappingFuncTest.java
@@@ -1,276 -1,0 +1,276 @@@
 +package org.apache.knox.gateway;
 +
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +
 +import java.io.IOException;
 +import java.util.concurrent.ConcurrentHashMap;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +
 +/**
 + * Test the Gateway Topology Port Mapping functionality
 + *
 + */
 +@Category(ReleaseTest.class)
 +public class GatewayPortMappingFuncTest {
 +
 +  // Specifies if the test requests should go through the gateway or directly to the services.
 +  // This is frequently used to verify the behavior of the test both with and without the gateway.
 +  private static final boolean USE_GATEWAY = true;
 +
 +  // Specifies if the test requests should be sent to mock services or the real services.
 +  // This is frequently used to verify the behavior of the test both with and without mock services.
 +  private static final boolean USE_MOCK_SERVICES = true;
 +
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  private static MockServer masterServer;
 +
 +  private static int eeriePort;
 +
 +  public GatewayPortMappingFuncTest() {
 +    super();
 +  }
 +
 +  /**
 +   * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +   * registry of sorts for all of the services that will be used by the test methods.
 +   * The createTopology method is used to create the topology file that would normally be read from disk.
 +   * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +   * <p/>
 +   * This would normally be done once for this suite, but the failure tests start affecting each other depending
 +   * on the state of the last 'active' URL.
 +   *
 +   * @throws Exception Thrown if any failure occurs.
 +   */
 +  @BeforeClass
 +  public static void setup() throws Exception {
 +    LOG_ENTER();
 +
 +    eeriePort = getAvailablePort(1240, 49151);
 +
 +    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<String, Integer>();
 +    topologyPortMapping.put("eerie", eeriePort);
 +
 +    masterServer = new MockServer("master", true);
 +    GatewayTestConfig config = new GatewayTestConfig();
 +    config.setGatewayPath("gateway");
 +    config.setTopologyPortMapping(topologyPortMapping);
 +
 +    // Enable default topology
 +    config.setDefaultTopologyName("eerie");
 +
 +    driver.setResourceBase(WebHdfsHaFuncTest.class);
 +    driver.setupLdap(0);
 +
 +    driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/eerie/webhdfs", USE_MOCK_SERVICES);
 +
 +    driver.setupGateway(config, "eerie", createTopology("WEBHDFS", driver.getLdapUrl(), masterServer.getPort()), USE_GATEWAY);
 +
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanup() throws Exception {
 +    LOG_ENTER();
 +    driver.cleanup();
 +    driver.reset();
 +    masterServer.reset();
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the standard case:
 +   * http://localhost:{gatewayPort}/gateway/eerie/webhdfs/v1
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicListOperation() throws IOException {
 +    LOG_ENTER();
 +    test("http://localhost:" + driver.getGatewayPort() + "/gateway/eerie" + "/webhdfs" );
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the Default Topology Feature, activated by property
 +   * "default.app.topology.name"
 +   *
 +   * http://localhost:{gatewayPort}/webhdfs/v1
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testDefaultTopologyFeature() throws IOException {
 +    LOG_ENTER();
 +    test("http://localhost:" + driver.getGatewayPort() + "/webhdfs" );
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the multi port scenario.
 +   *
 +   * http://localhost:{eeriePort}/webhdfs/v1
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testMultiPortOperation() throws IOException {
 +    LOG_ENTER();
 +    test("http://localhost:" + eeriePort + "/webhdfs" );
 +    LOG_EXIT();
 +  }
 +
 +  /**
 +   * Test the multi port scenario when gateway path is included.
 +   *
 +   * http://localhost:{eeriePort}/gateway/eerie/webhdfs/v1
 +   *
 +   * @throws IOException
 +   */
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testMultiPortWithGatewayPath() throws IOException {
 +    LOG_ENTER();
 +    test("http://localhost:" + eeriePort + "/gateway/eerie" + "/webhdfs" );
 +    LOG_EXIT();
 +  }
 +
 +
 +  private void test (final String url) throws IOException {
 +    String password = "hdfs-password";
 +    String username = "hdfs";
 +
 +    masterServer.expect()
 +        .method("GET")
 +        .pathInfo("/webhdfs/v1/")
 +        .queryParam("op", "LISTSTATUS")
 +        .queryParam("user.name", username)
 +        .respond()
 +        .status(HttpStatus.SC_OK)
 +        .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +        .contentType("application/json");
 +
 +    given()
 +        .auth().preemptive().basic(username, password)
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam("op", "LISTSTATUS")
 +        .expect()
 +        .log().ifError()
 +        .statusCode(HttpStatus.SC_OK)
 +        .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +        .when().get(url + "/v1/");
 +    masterServer.isEmpty();
 +  }
 +
 +
 +  /**
 +   * Creates a topology that is deployed to the gateway instance for the test suite.
 +   * Note that this topology is shared by all of the test methods in this suite.
 +   *
 +   * @return A populated XML structure for a topology file.
 +   */
 +  public static XMLTag createTopology(final String role, final String ldapURL, final int gatewayPort ) {
 +    XMLTag xml = XMLDoc.newDocument(true)
 +        .addRoot("topology")
 +        .addTag("gateway")
 +        .addTag("provider")
 +        .addTag("role").addText("webappsec")
 +        .addTag("name").addText("WebAppSec")
 +        .addTag("enabled").addText("true")
 +        .addTag("param")
 +        .addTag("name").addText("csrf.enabled")
 +        .addTag("value").addText("true").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("authentication")
 +        .addTag("name").addText("ShiroProvider")
 +        .addTag("enabled").addText("true")
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm")
 +        .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.userDnTemplate")
 +        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.url")
 +        .addTag("value").addText(ldapURL).gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
 +        .addTag("value").addText("simple").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("urls./**")
 +        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("identity-assertion")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("Default").gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("authorization")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("AclsAuthz").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("webhdfs-acl")
 +        .addTag("value").addText("hdfs;*;*").gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("ha")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("HaProvider")
 +        .addTag("param")
 +        .addTag("name").addText("WEBHDFS")
 +        .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
 +        .gotoRoot()
 +        .addTag("service")
 +        .addTag("role").addText(role)
 +        .addTag("url").addText("http://localhost:" + gatewayPort + "/webhdfs")
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  /**
 +   * Finds the next available port in the given range.
 +   *
 +   * @param min The lower bound of the port range (inclusive).
 +   * @param max The upper bound of the port range (inclusive).
 +   * @return An available port, or -1 if none is free in the range.
 +   */
 +  public static int getAvailablePort(final int min, final int max) {
 +
 +    for (int i = min; i <= max; i++) {
 +
 +      if (!GatewayServer.isPortInUse(i)) {
 +        return i;
 +      }
 +    }
 +    // No available port found in the given range.
 +    return -1;
 +  }
 +
 +}
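
Taken together, the tests above exercise three URL shapes for the same WEBHDFS service. As a
condensed recap sketch (using only configuration calls that already appear in this suite, not
an additional test), the switches map to the URL shapes as follows:

    ConcurrentHashMap<String, Integer> topologyPortMapping = new ConcurrentHashMap<>();
    topologyPortMapping.put("eerie", eeriePort);

    GatewayTestConfig config = new GatewayTestConfig();
    config.setGatewayPath("gateway");                    // http://host:{gatewayPort}/gateway/eerie/webhdfs
    config.setTopologyPortMapping(topologyPortMapping);  // http://host:{eeriePort}/webhdfs
    config.setDefaultTopologyName("eerie");              // http://host:{gatewayPort}/webhdfs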

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
index 96e64cf,0000000..4c87a90
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySampleFuncTest.java
@@@ -1,180 -1,0 +1,180 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +public class GatewaySampleFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewaySampleFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    File descriptor = new File( topoDir, "test-cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  //@Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testTestService() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}
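
For readers new to these functional tests, setupGateway() above is also a minimal example of
bootstrapping a gateway programmatically. A condensed sketch using only calls that already
appear in that method (testConfig as built there; checked-exception handling elided, the test
itself wraps init() in a try/catch):

    DefaultGatewayServices srvcs = new DefaultGatewayServices();
    Map<String, String> options = new HashMap<>();
    options.put("persist-master", "false");  // do not persist the master secret (test-only setup)
    options.put("master", "password");
    srvcs.init(testConfig, options);

    GatewayServer gateway = GatewayServer.startGateway(testConfig, srvcs);
    String gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort()
        + "/" + testConfig.getGatewayPath();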

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
index 02bb3dd,0000000..3726dbc
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewaySslFuncTest.java
@@@ -1,304 -1,0 +1,306 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.File;
 +import java.nio.file.FileSystems;
 +import java.nio.file.Path;
 +import java.security.KeyManagementException;
 +import java.security.NoSuchAlgorithmException;
 +import java.security.SecureRandom;
 +import java.security.cert.CertificateException;
 +import java.security.cert.X509Certificate;
 +import java.util.Arrays;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Iterator;
 +import java.util.Map;
 +import java.util.Properties;
 +import java.util.ServiceLoader;
 +import java.util.UUID;
 +import javax.net.ssl.HostnameVerifier;
 +import javax.net.ssl.SSLContext;
 +import javax.net.ssl.SSLHandshakeException;
 +import javax.net.ssl.SSLSession;
 +import javax.net.ssl.TrustManager;
 +import javax.net.ssl.X509TrustManager;
 +import javax.xml.transform.stream.StreamSource;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpHost;
 +import org.apache.http.auth.AuthScope;
 +import org.apache.http.auth.UsernamePasswordCredentials;
 +import org.apache.http.client.AuthCache;
 +import org.apache.http.client.CredentialsProvider;
 +import org.apache.http.client.methods.CloseableHttpResponse;
 +import org.apache.http.client.methods.HttpGet;
 +import org.apache.http.client.protocol.HttpClientContext;
 +import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
 +import org.apache.http.impl.auth.BasicScheme;
 +import org.apache.http.impl.client.BasicAuthCache;
 +import org.apache.http.impl.client.BasicCredentialsProvider;
 +import org.apache.http.impl.client.CloseableHttpClient;
 +import org.apache.http.impl.client.HttpClients;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.fail;
 +import static org.xmlmatchers.transform.XmlConverters.the;
 +import static org.xmlmatchers.xpath.HasXPath.hasXPath;
 +
 +@Category( ReleaseTest.class )
 +public class GatewaySslFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewaySslFuncTest.class );
 +  private static Class<?> DAT = GatewaySslFuncTest.class;
 +
 +  private static Enumeration<Appender> appenders;
 +  private static GatewayTestConfig config;
 +  private static DefaultGatewayServices services;
 +  private static GatewayServer gateway;
 +  private static String gatewayScheme;
 +  private static int gatewayPort;
 +  private static String gatewayUrl;
 +  private static Properties params;
 +  private static TopologyService topos;
 +  private static MockServer mockWebHdfs;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanupTest() throws Exception {
 +    FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-     FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
++    // The test run should not fail if deleting the deployment files is unsuccessful;
++    // deletion has already been done by the TopologyService.
++    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    config = new GatewayTestConfig();
 +    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( config.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    File securityDir = new File( config.getGatewaySecurityDir() );
 +    securityDir.mkdirs();
 +
 +    config.setSSLEnabled( true );
 +
 +    setupMockServers();
 +    startGatewayServer();
 +  }
 +
 +  public static void setupMockServers() throws Exception {
 +    mockWebHdfs = new MockServer( "WEBHDFS", true );
 +  }
 +
 +  private static GatewayServices instantiateGatewayServices() {
 +    ServiceLoader<GatewayServices> loader = ServiceLoader.load( GatewayServices.class );
 +    Iterator<GatewayServices> services = loader.iterator();
 +    if (services.hasNext()) {
 +      return services.next();
 +    }
 +    return null;
 +  }
 +
 +  public static void startGatewayServer() throws Exception {
 +    instantiateGatewayServices();
 +    services = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      services.init( config, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    topos = services.getService(GatewayServices.TOPOLOGY_SERVICE);
 +
 +    gateway = GatewayServer.startGateway( config, services );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    gatewayScheme = config.isSSLEnabled() ? "https" : "http";
 +    gatewayPort = gateway.getAddresses()[0].getPort();
 +    gatewayUrl = gatewayScheme + "://localhost:" + gatewayPort + "/" + config.getGatewayPath();
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    params = new Properties();
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +    params.put( "WEBHDFS_URL", "http://localhost:" + mockWebHdfs.getPort() );
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testKnox674SslCipherSuiteConfig() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr = TestUtils.merge( DAT, "test-admin-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "test-topology.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = gatewayUrl + "/test-topology/api/v1/version";
 +
 +    HttpHost targetHost = new HttpHost( "localhost", gatewayPort, gatewayScheme );
 +    CredentialsProvider credsProvider = new BasicCredentialsProvider();
 +    credsProvider.setCredentials(
 +        new AuthScope( targetHost.getHostName(), targetHost.getPort() ),
 +        new UsernamePasswordCredentials( username, password ) );
 +
 +    AuthCache authCache = new BasicAuthCache();
 +    BasicScheme basicAuth = new BasicScheme();
 +    authCache.put( targetHost, basicAuth );
 +
 +    HttpClientContext context = HttpClientContext.create();
 +    context.setCredentialsProvider( credsProvider );
 +    context.setAuthCache( authCache );
 +
 +    CloseableHttpClient client = HttpClients.custom()
 +        .setSSLSocketFactory(
 +            new SSLConnectionSocketFactory(
 +                createInsecureSslContext(),
 +                new String[]{"TLSv1.2"},
 +                new String[]{"TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256"},
 +                new TrustAllHosts() ) )
 +        .build();
 +    HttpGet request = new HttpGet( serviceUrl );
 +    CloseableHttpResponse response = client.execute( request, context );
 +    assertThat( the( new StreamSource( response.getEntity().getContent() ) ), hasXPath( "/ServerVersion/version" ) );
 +    response.close();
 +    client.close();
 +
 +    gateway.stop();
 +    config.setExcludedSSLCiphers( Arrays.asList( new String[]{ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256" } ) );
 +    config.setIncludedSSLCiphers( Arrays.asList( new String[]{ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA" } ) );
 +
 +    startGatewayServer();
 +    serviceUrl = gatewayUrl + "/test-topology/api/v1/version";
 +
 +    try {
 +      client = HttpClients.custom()
 +          .setSSLSocketFactory(
 +              new SSLConnectionSocketFactory(
 +                  createInsecureSslContext(),
 +                  new String[]{ "TLSv1.2" },
 +                  new String[]{ "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256" },
 +                  new TrustAllHosts() ) ).build();
 +      request = new HttpGet( serviceUrl );
 +      client.execute( request, context );
 +      fail( "Expected SSLHandshakeException" );
 +    } catch ( SSLHandshakeException e ) {
 +      // Expected.
 +      client.close();
 +    }
 +
 +    client = HttpClients.custom()
 +        .setSSLSocketFactory(
 +            new SSLConnectionSocketFactory(
 +                createInsecureSslContext(),
 +                new String[]{ "TLSv1.2" },
 +                new String[]{ "TLS_DHE_RSA_WITH_AES_128_CBC_SHA" },
 +                new TrustAllHosts() ) ).build();
 +    request = new HttpGet( serviceUrl );
 +    response = client.execute( request, context );
 +    assertThat( the( new StreamSource( response.getEntity().getContent() ) ), hasXPath( "/ServerVersion/version" ) );
 +    response.close();
 +    client.close();
 +
 +    LOG_EXIT();
 +  }
 +
 +  public static class TrustAllHosts implements HostnameVerifier {
 +    @Override
 +    public boolean verify( String host, SSLSession sslSession ) {
 +      // Trust all hostnames.
 +      return true;
 +    }
 +  }
 +
 +  public static class TrustAllCerts implements X509TrustManager {
 +
 +    public void checkClientTrusted( X509Certificate[] x509Certificates, String s ) throws CertificateException {
 +      // Trust all certificates.
 +    }
 +
 +    public void checkServerTrusted( X509Certificate[] x509Certificates, String s ) throws CertificateException {
 +      // Trust all certificates.
 +    }
 +
 +    public X509Certificate[] getAcceptedIssuers() {
 +      return null;
 +    }
 +
 +  }
 +
 +  public static SSLContext createInsecureSslContext() throws NoSuchAlgorithmException, KeyManagementException {
 +    SSLContext sslContext = SSLContext.getInstance( "SSL" );
 +    sslContext.init( null, new TrustManager[]{ new TrustAllCerts() }, new SecureRandom() );
 +    return sslContext;
 +  }
 +
 +}
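
The KNOX-674 test above drives cipher-suite selection from the client side by restricting the
suites offered through SSLConnectionSocketFactory. As a hedged sketch only (not part of this
commit; gatewayPort and LOG are the fields of this test class), the suite a server actually
negotiates can also be inspected with plain JSSE, reusing createInsecureSslContext() from this
class:

    // Uses javax.net.ssl.SSLSocket, which this class does not currently import.
    SSLContext sslContext = createInsecureSslContext();
    try (SSLSocket socket = (SSLSocket) sslContext.getSocketFactory()
                                                  .createSocket("localhost", gatewayPort)) {
      socket.setEnabledProtocols(new String[]{ "TLSv1.2" });
      socket.startHandshake();
      // Reports which of the offered cipher suites the gateway selected.
      LOG.info("Negotiated cipher suite: " + socket.getSession().getCipherSuite());
    }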

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
index 30c6ec8,0000000..7aaf056
mode 100755,000000..100755
--- a/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/Knox242FuncTest.java
@@@ -1,284 -1,0 +1,284 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.net.InetSocketAddress;
 +import java.net.URL;
 +import java.nio.file.FileSystems;
 +import java.nio.file.Path;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Ignore;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * Functional test to verify KNOX-242 (LDAP Enhancements).
 + * Please see
 + * https://issues.apache.org/jira/browse/KNOX-242
 + *
 + */
 +public class Knox242FuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( Knox242FuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  public static String serviceUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    String basedir = System.getProperty("basedir");
 +    if (basedir == null) {
 +      basedir = new File(".").getCanonicalPath();
 +    }
 +    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/users-dynamic.ldif");
 +    driver.setupLdap( 0 , path.toFile() );
 +    setupGateway();
 +    TestUtils.awaitNon404HttpStatus( new URL( serviceUrl ), 10000, 100 );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws IOException, Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/testdg-cluster";
 +    serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +
 +    GatewayServices services = GatewayServer.getGatewayServices();
 +    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
 +    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
 +
 +    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
 +    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
 +
 +    File descriptor = new File( topoDir, "testdg-cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
 +        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl())
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
 +        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
 +
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userSearchAttributeName" )
 +        .addTag( "value" ).addText( "uid" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userObjectClass" )
 +        .addTag( "value" ).addText( "person" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userSearchBase" )
 +        .addTag( "value" ).addText( "dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.groupSearchBase" )
 +        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
 +
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
 +        .addTag( "value" ).addText( "true" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
 +        .addTag( "value" ).addText( "groupofurls" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
 +        .addTag( "value" ).addText( "memberurl" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
 +        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
 +        .addTag( "value" ).addText( "testdg-cluster" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
 +        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
 +        // .addTag( "value" ).addText( "guest-password" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "name" ).addText( "AclsAuthz" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "test-service-role.acl" )
 +        .addTag( "value" ).addText( "*;directors;*" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +         // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  @Ignore
 +  // @Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testGroupMember() throws ClassNotFoundException, Exception {
 +    LOG_ENTER();
 +    String username = "joe";
 +    String password = "joe-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testNonGroupMember() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_FORBIDDEN )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}
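
For context on the test above: the AclsAuthz rule "*;directors;*" set for test-service-role.acl follows the semicolon-separated users;groups;ip-addresses pattern, which is why 'joe' (a member of 'directors' via the dynamic group configured in the Shiro realm) is expected to receive a 200 while 'guest' receives a 403. The sketch below is a simplified, hypothetical reading of that rule (all three fields must match); it is not the actual Knox AclsAuthz provider code.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    // Illustrative only; not the Knox AclsAuthz implementation.
    public class AclRuleSketch {

      // Evaluates a "users;groups;ip-addresses" rule such as "*;directors;*".
      static boolean isAllowed( String rule, String user, Set<String> groups, String remoteIp ) {
        String[] parts = rule.split( ";" );
        boolean userOk  = "*".equals( parts[0] ) || Arrays.asList( parts[0].split( "," ) ).contains( user );
        boolean groupOk = "*".equals( parts[1] ) || !Collections.disjoint( Arrays.asList( parts[1].split( "," ) ), groups );
        boolean ipOk    = "*".equals( parts[2] ) || Arrays.asList( parts[2].split( "," ) ).contains( remoteIp );
        return userOk && groupOk && ipOk;
      }

      public static void main( String[] args ) {
        Set<String> joe = new HashSet<>( Arrays.asList( "directors" ) );
        Set<String> guest = new HashSet<>();
        System.out.println( isAllowed( "*;directors;*", "joe", joe, "127.0.0.1" ) );     // true  -> expect 200
        System.out.println( isAllowed( "*;directors;*", "guest", guest, "127.0.0.1" ) ); // false -> expect 403
      }
    }
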

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
index a76dbe6,0000000..d7496b6
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/WebHdfsHaFuncTest.java
@@@ -1,466 -1,0 +1,466 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + * <p/>
 + * http://www.apache.org/licenses/LICENSE-2.0
 + * <p/>
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpStatus;
 +import org.junit.After;
 +import org.junit.Before;
 +import org.junit.Ignore;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +
 +import java.io.IOException;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +
 +@Category(ReleaseTest.class)
 +public class WebHdfsHaFuncTest {
 +
 +   // Specifies if the test requests should go through the gateway or directly to the services.
 +   // This is frequently used to verify the behavior of the test both with and without the gateway.
 +   private static final boolean USE_GATEWAY = true;
 +
 +   // Specifies if the test requests should be sent to mock services or the real services.
 +   // This is frequently used to verify the behavior of the test both with and without mock services.
 +   private static final boolean USE_MOCK_SERVICES = true;
 +
 +   private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +   private static MockServer masterServer;
 +
 +   private static MockServer standbyServer;
 +
 +   /**
 +    * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +    * registry of sorts for all of the services that will be used by the test methods.
 +    * The createTopology method is used to create the topology file that would normally be read from disk.
 +    * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +    * <p/>
 +    * This would normally be done once for this suite, but the failure tests start affecting each other depending
 +    * on the state of the last 'active' URL.
 +    *
 +    * @throws Exception Thrown if any failure occurs.
 +    */
 +   @Before
 +   public void setup() throws Exception {
 +      LOG_ENTER();
 +      //Log.setLog(new NoOpLogger());
 +      masterServer = new MockServer("master", true);
 +      standbyServer = new MockServer("standby", true);
 +      GatewayTestConfig config = new GatewayTestConfig();
 +      config.setGatewayPath("gateway");
 +      driver.setResourceBase(WebHdfsHaFuncTest.class);
 +      driver.setupLdap(0);
 +      driver.setupService("WEBHDFS", "http://vm.local:50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
 +      driver.setupGateway(config, "cluster", createTopology(), USE_GATEWAY);
 +      LOG_EXIT();
 +   }
 +
 +   @After
 +   public void cleanup() throws Exception {
 +      LOG_ENTER();
 +      driver.cleanup();
 +      driver.reset();
 +      masterServer.reset();
 +      standbyServer.reset();
 +      LOG_EXIT();
 +   }
 +
 +   /**
 +    * Creates a topology that is deployed to the gateway instance for the test suite.
 +    * Note that this topology is shared by all of the test methods in this suite.
 +    *
 +    * @return A populated XML structure for a topology file.
 +    */
 +   private static XMLTag createTopology() {
 +      XMLTag xml = XMLDoc.newDocument(true)
 +            .addRoot("topology")
 +            .addTag("gateway")
 +            .addTag("provider")
 +            .addTag("role").addText("webappsec")
 +            .addTag("name").addText("WebAppSec")
 +            .addTag("enabled").addText("true")
 +            .addTag("param")
 +            .addTag("name").addText("csrf.enabled")
 +            .addTag("value").addText("true").gotoParent().gotoParent()
 +            .addTag("provider")
 +            .addTag("role").addText("authentication")
 +            .addTag("name").addText("ShiroProvider")
 +            .addTag("enabled").addText("true")
 +            .addTag("param")
 +            .addTag("name").addText("main.ldapRealm")
 +            .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +            .addTag("param")
 +            .addTag("name").addText("main.ldapRealm.userDnTemplate")
 +            .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
 +            .addTag("param")
 +            .addTag("name").addText("main.ldapRealm.contextFactory.url")
 +            .addTag("value").addText(driver.getLdapUrl()).gotoParent()
 +            .addTag("param")
 +            .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
 +            .addTag("value").addText("simple").gotoParent()
 +            .addTag("param")
 +            .addTag("name").addText("urls./**")
 +            .addTag("value").addText("authcBasic").gotoParent().gotoParent()
 +            .addTag("provider")
 +            .addTag("role").addText("identity-assertion")
 +            .addTag("enabled").addText("true")
 +            .addTag("name").addText("Default").gotoParent()
 +            .addTag("provider")
 +            .addTag("role").addText("authorization")
 +            .addTag("enabled").addText("true")
 +            .addTag("name").addText("AclsAuthz").gotoParent()
 +            .addTag("param")
 +            .addTag("name").addText("webhdfs-acl")
 +            .addTag("value").addText("hdfs;*;*").gotoParent()
 +            .addTag("provider")
 +            .addTag("role").addText("ha")
 +            .addTag("enabled").addText("true")
 +            .addTag("name").addText("HaProvider")
 +            .addTag("param")
 +            .addTag("name").addText("WEBHDFS")
 +            .addTag("value").addText("maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true").gotoParent()
 +            .gotoRoot()
 +            .addTag("service")
 +            .addTag("role").addText("WEBHDFS")
 +            .addTag("url").addText("http://localhost:" + masterServer.getPort() + "/webhdfs")
 +            .addTag("url").addText("http://localhost:" + standbyServer.getPort() + "/webhdfs").gotoParent()
 +            .gotoRoot();
 +//     System.out.println( "GATEWAY=" + xml.toString() );
 +      return xml;
 +   }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testBasicListOperation() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      masterServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_OK)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "LISTSTATUS")
 +            .expect()
 +            .log().ifError()
 +            .statusCode(HttpStatus.SC_OK)
 +            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
 +      masterServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   @Ignore( "KNOX-446" )
 +   public void testFailoverListOperation() throws Exception {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //Shutdown master and expect standby to serve the list response
 +      masterServer.stop();
 +      standbyServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_OK)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "LISTSTATUS")
 +            .expect()
 +            .log().ifError()
 +            .statusCode(HttpStatus.SC_OK)
 +            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
 +      standbyServer.isEmpty();
 +      masterServer.start();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testFailoverLimit() throws Exception {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //Shutdown master and expect standby to serve the list response
 +      masterServer.stop();
 +      standbyServer.stop();
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "LISTSTATUS")
 +            .expect()
 +//            .log().ifError()
 +            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
 +            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
 +      standbyServer.start();
 +      masterServer.start();
 +      LOG_EXIT();
 +   }
 +
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   @Ignore( "KNOX-446" )
 +   public void testServerInStandby() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //make master the server that is in standby
 +      masterServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
 +            .contentType("application/json");
 +      //standby server is 'active' in this test case and serves the list response
 +      standbyServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_OK)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-success.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "LISTSTATUS")
 +            .expect()
 +            .log().ifError()
 +            .statusCode(HttpStatus.SC_OK)
 +            .content("FileStatuses.FileStatus[0].pathSuffix", is("app-logs"))
 +            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
 +      masterServer.isEmpty();
 +      standbyServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testServerInStandbyFailoverLimit() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //make master the server that is in standby
 +      masterServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
 +            .contentType("application/json");
 +      standbyServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
 +            .contentType("application/json");
 +      standbyServer.expect()
 +            .method("GET")
 +            .pathInfo("/webhdfs/v1/")
 +            .queryParam("op", "LISTSTATUS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-liststatus-standby.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "LISTSTATUS")
 +            .expect()
 +//            .log().ifError()
 +            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
 +            .when().get(driver.getUrl("WEBHDFS") + "/v1/");
 +      masterServer.isEmpty();
 +      standbyServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testServerInSafeMode() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //master is in safe mode
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_OK)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .expect()
 +            .log().ifError()
 +            .statusCode(HttpStatus.SC_OK)
 +            .content("boolean", is(true))
 +            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
 +      masterServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testServerInSafeModeRetriableException() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //master is in safe mode
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/new")
 +            .queryParam("op", "MKDIRS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-mkdirs-safemode.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/new")
 +            .queryParam("op", "MKDIRS")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_OK)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode-off.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "MKDIRS")
 +            .expect()
 +            .log().ifError()
 +            .statusCode(HttpStatus.SC_OK)
 +            .content("boolean", is(true))
 +            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/new");
 +      masterServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +
 +   @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +   public void testServerInSafeModeRetryLimit() throws IOException {
 +      LOG_ENTER();
 +      String username = "hdfs";
 +      String password = "hdfs-password";
 +      //master is in safe mode
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
 +            .contentType("application/json");
 +      masterServer.expect()
 +            .method("POST")
 +            .pathInfo("/webhdfs/v1/user/hdfs/foo.txt")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .queryParam("user.name", username)
 +            .respond()
 +            .status(HttpStatus.SC_FORBIDDEN)
 +            .content(driver.getResourceBytes("webhdfs-rename-safemode.json"))
 +            .contentType("application/json");
 +      given()
 +            .auth().preemptive().basic(username, password)
 +            .header("X-XSRF-Header", "jksdhfkhdsf")
 +            .queryParam("op", "RENAME")
 +            .queryParam("destination", "/user/hdfs/foo.txt")
 +            .expect()
 +//            .log().ifError()
 +            .statusCode(HttpStatus.SC_INTERNAL_SERVER_ERROR)
 +            .when().post(driver.getUrl("WEBHDFS") + "/v1/user/hdfs/foo.txt");
 +      masterServer.isEmpty();
 +      LOG_EXIT();
 +   }
 +}
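
One detail worth noting in the topology built by createTopology() above: the HaProvider's WEBHDFS parameter packs its settings into a single semicolon-delimited string (maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true). Those limits are what testFailoverLimit, testServerInStandbyFailoverLimit and testServerInSafeModeRetryLimit rely on when they expect a 500 once the failover or retry attempts are exhausted. A minimal, hypothetical sketch of splitting such a string into its settings (not Knox's own configuration parser) might look like this:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustrative only; not Knox's HaProvider configuration parser.
    public class HaParamSketch {

      static Map<String, String> parse( String param ) {
        Map<String, String> settings = new LinkedHashMap<>();
        for ( String pair : param.split( ";" ) ) {
          String[] kv = pair.split( "=", 2 );
          settings.put( kv[0], kv.length > 1 ? kv[1] : "" );
        }
        return settings;
      }

      public static void main( String[] args ) {
        String param = "maxFailoverAttempts=3;failoverSleep=15;maxRetryAttempts=3;retrySleep=10;enabled=true";
        // Prints: {maxFailoverAttempts=3, failoverSleep=15, maxRetryAttempts=3, retrySleep=10, enabled=true}
        System.out.println( parse( param ) );
      }
    }
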


[11/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
KNOX-998 - Merging from master


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/f4a4355d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/f4a4355d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/f4a4355d

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: f4a4355d430ae6152eaefbfd3a118cfd65b00f07
Parents: 2e37584
Author: Sandeep More <mo...@apache.org>
Authored: Wed Sep 13 09:48:06 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Sep 13 09:48:06 2017 -0400

----------------------------------------------------------------------
 .../org/apache/knox/gateway/provider/federation/JWTTokenTest.java    | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/f4a4355d/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/JWTTokenTest.java
deleted file mode 100644
index e69de29..0000000


[07/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
index 1861148,0000000..0fbd6db
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayBasicFuncTest.java
@@@ -1,4509 -1,0 +1,4515 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.ByteArrayOutputStream;
 +import java.io.File;
 +import java.io.FileFilter;
 +import java.io.FileNotFoundException;
 +import java.io.IOException;
 +import java.io.PrintStream;
 +import java.io.StringWriter;
 +import java.net.InetAddress;
 +import java.net.InetSocketAddress;
 +import java.net.URI;
 +import java.net.URISyntaxException;
 +import java.net.URL;
 +import java.nio.charset.Charset;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import javax.ws.rs.core.MediaType;
 +
- import com.jayway.restassured.http.ContentType;
- import com.jayway.restassured.path.json.JsonPath;
- import com.jayway.restassured.response.Cookie;
- import com.jayway.restassured.response.Header;
- import com.jayway.restassured.response.Response;
- import com.jayway.restassured.specification.ResponseSpecification;
++import io.restassured.RestAssured;
++import io.restassured.http.ContentType;
++import io.restassured.http.Cookie;
++import io.restassured.http.Header;
++import io.restassured.path.json.JsonPath;
++import io.restassured.response.Response;
++import io.restassured.specification.ResponseSpecification;
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.commons.io.filefilter.WildcardFileFilter;
 +import org.apache.commons.lang3.ArrayUtils;
 +import org.apache.knox.gateway.util.KnoxCLI;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.MediumTests;
 +import org.apache.hadoop.test.category.VerifyTest;
 +import org.apache.hadoop.test.mock.MockRequestMatcher;
 +import org.apache.http.HttpHost;
 +import org.apache.http.HttpResponse;
 +import org.apache.http.HttpStatus;
 +import org.apache.http.auth.AuthScope;
 +import org.apache.http.auth.UsernamePasswordCredentials;
 +import org.apache.http.client.AuthCache;
 +import org.apache.http.client.CredentialsProvider;
 +import org.apache.http.client.methods.HttpGet;
 +import org.apache.http.client.methods.HttpPost;
 +import org.apache.http.client.protocol.HttpClientContext;
 +import org.apache.http.entity.StringEntity;
 +import org.apache.http.impl.auth.BasicScheme;
 +import org.apache.http.impl.client.BasicAuthCache;
 +import org.apache.http.impl.client.BasicCredentialsProvider;
 +import org.apache.http.impl.client.CloseableHttpClient;
 +import org.apache.http.impl.client.HttpClientBuilder;
 +import org.apache.http.util.EntityUtils;
 +import org.apache.velocity.Template;
 +import org.apache.velocity.VelocityContext;
 +import org.apache.velocity.app.VelocityEngine;
 +import org.apache.velocity.runtime.RuntimeConstants;
 +import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
 +import org.hamcrest.CoreMatchers;
 +import org.hamcrest.Matcher;
 +import org.hamcrest.MatcherAssert;
 +import org.hamcrest.Matchers;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.*;
 +import static org.hamcrest.Matchers.containsString;
 +import static org.hamcrest.Matchers.greaterThan;
 +import static org.hamcrest.text.IsEmptyString.isEmptyString;
 +import static org.junit.Assert.assertThat;
 +import static org.junit.Assert.assertTrue;
 +import static org.xmlmatchers.XmlMatchers.isEquivalentTo;
 +import static org.xmlmatchers.transform.XmlConverters.the;
 +import static uk.co.datumedge.hamcrest.json.SameJSONAs.sameJSONAs;
 +
 +@Category( { VerifyTest.class, MediumTests.class } )
 +public class GatewayBasicFuncTest {
 +
 +  private static final Charset UTF8 = Charset.forName("UTF-8");
 +
 +  // Uncomment to cause the test to hang after the gateway instance is setup.
 +  // This will allow the gateway instance to be hit directly via some external client.
 +//  @Test
 +//  public void hang() throws IOException {
 +//    System.out.println( "Server on port " + driver.gateway.getAddresses()[0].getPort() );
 +//    System.out.println();
 +//    System.in.read();
 +//  }
 +
 +  private static Logger log = LoggerFactory.getLogger( GatewayBasicFuncTest.class );
 +
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  // Controls the host name to which the gateway dispatches requests.  This may be the name of a sandbox VM
 +  // or an EC2 instance.  Currently only a single host is supported.
 +  private static final String TEST_HOST = "vm.local";
 +
 +  // Specifies if the test requests should go through the gateway or directly to the services.
 +  // This is frequently used to verify the behavior of the test both with and without the gateway.
 +  private static final boolean USE_GATEWAY = true;
 +
 +  // Specifies if the test requests should be sent to mock services or the real services.
 +  // This is frequently used to verify the behavior of the test both with and without mock services.
 +  private static final boolean USE_MOCK_SERVICES = true;
 +
 +  // Specifies if the GATEWAY_HOME created for the test should be deleted when the test suite is complete.
 +  // This is frequently used during debugging to keep the GATEWAY_HOME around for inspection.
 +  private static final boolean CLEANUP_TEST = true;
 +
 +//  private static final boolean USE_GATEWAY = false;
 +//  private static final boolean USE_MOCK_SERVICES = false;
 +//  private static final boolean CLEANUP_TEST = false;
 +
 +  /**
 +   * Creates a deployment of a gateway instance that all test methods will share.  This method also creates a
 +   * registry of sorts for all of the services that will be used by the test methods.
 +   * The createTopology method is used to create the topology file that would normally be read from disk.
 +   * The driver.setupGateway invocation is where the creation of GATEWAY_HOME occurs.
 +   * @throws Exception Thrown if any failure occurs.
 +   */
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    //Log.setLog( new NoOpLogger() );
 +    LOG_ENTER();
 +    GatewayTestConfig config = new GatewayTestConfig();
 +    driver.setResourceBase(GatewayBasicFuncTest.class);
 +    driver.setupLdap(0);
 +    driver.setupService("WEBHDFS", "http://" + TEST_HOST + ":50070/webhdfs", "/cluster/webhdfs", USE_MOCK_SERVICES);
 +    driver.setupService( "DATANODE", "http://" + TEST_HOST + ":50075/webhdfs", "/cluster/webhdfs/data", USE_MOCK_SERVICES );
 +    driver.setupService( "WEBHCAT", "http://" + TEST_HOST + ":50111/templeton", "/cluster/templeton", USE_MOCK_SERVICES );
 +    driver.setupService( "OOZIE", "http://" + TEST_HOST + ":11000/oozie", "/cluster/oozie", USE_MOCK_SERVICES );
 +    driver.setupService( "HIVE", "http://" + TEST_HOST + ":10000", "/cluster/hive", USE_MOCK_SERVICES );
 +    driver.setupService( "WEBHBASE", "http://" + TEST_HOST + ":60080", "/cluster/hbase", USE_MOCK_SERVICES );
 +    driver.setupService( "NAMENODE", "hdfs://" + TEST_HOST + ":8020", null, USE_MOCK_SERVICES );
 +    driver.setupService( "JOBTRACKER", "thrift://" + TEST_HOST + ":8021", null, USE_MOCK_SERVICES );
 +    driver.setupService( "RESOURCEMANAGER", "http://" + TEST_HOST + ":8088/ws", "/cluster/resourcemanager", USE_MOCK_SERVICES );
 +    driver.setupService( "FALCON", "http://" + TEST_HOST + ":15000", "/cluster/falcon", USE_MOCK_SERVICES );
 +    driver.setupService( "STORM", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
 +    driver.setupService( "STORM-LOGVIEWER", "http://" + TEST_HOST + ":8477", "/cluster/storm", USE_MOCK_SERVICES );
 +    driver.setupService( "SOLR", "http://" + TEST_HOST + ":8983", "/cluster/solr", USE_MOCK_SERVICES );
 +    driver.setupService( "KAFKA", "http://" + TEST_HOST + ":8477", "/cluster/kafka", USE_MOCK_SERVICES );
 +    driver.setupGateway( config, "cluster", createTopology(), USE_GATEWAY );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    if( CLEANUP_TEST ) {
 +      driver.cleanup();
 +    }
 +    LOG_EXIT();
 +  }
 +
 +  @After
 +  public void cleanupTest() {
 +    driver.reset();
 +  }
 +
 +  /**
 +   * Creates a topology that is deployed to the gateway instance for the test suite.
 +   * Note that this topology is shared by all of the test methods in this suite.
 +   * @return A populated XML structure for a topology file.
 +   */
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +          .addTag( "gateway" )
 +            .addTag( "provider" )
 +              .addTag( "role" ).addText( "webappsec" )
 +              .addTag("name").addText("WebAppSec")
 +              .addTag("enabled").addText("true")
 +              .addTag( "param" )
 +                .addTag("name").addText("csrf.enabled")
 +                .addTag("value").addText("true").gotoParent().gotoParent()
 +            .addTag("provider")
 +              .addTag("role").addText("authentication")
 +              .addTag("name").addText("ShiroProvider")
 +              .addTag("enabled").addText("true")
 +              .addTag( "param" )
 +                .addTag("name").addText("main.ldapRealm")
 +                .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +                .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +                .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +                .addTag( "value" ).addText( "simple" ).gotoParent()
 +              .addTag( "param" )
 +                .addTag( "name" ).addText( "urls./**" )
 +                .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +            .addTag("provider")
 +              .addTag("role").addText("identity-assertion")
 +              .addTag("enabled").addText("true")
 +              .addTag("name").addText("Default").gotoParent()
 +            .addTag("provider")
 +              .addTag( "role" ).addText( "authorization" )
 +              .addTag( "enabled" ).addText( "true" )
 +              .addTag("name").addText("AclsAuthz").gotoParent()
 +              .addTag("param")
 +                .addTag("name").addText( "webhdfs-acl" )
 +                .addTag("value").addText( "hdfs;*;*" ).gotoParent()
 +          .gotoRoot()
 +          .addTag("service")
 +            .addTag("role").addText("WEBHDFS")
 +            .addTag("url").addText(driver.getRealUrl("WEBHDFS")).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "NAMENODE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "NAMENODE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "DATANODE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "DATANODE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "JOBTRACKER" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "JOBTRACKER" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "WEBHCAT" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "WEBHCAT" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "OOZIE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "OOZIE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "HIVE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "HIVE" ) ).gotoParent()
 +          .addTag( "service" )
 +            .addTag( "role" ).addText( "WEBHBASE" )
 +            .addTag( "url" ).addText( driver.getRealUrl( "WEBHBASE" ) ).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("RESOURCEMANAGER")
 +            .addTag("url").addText(driver.getRealUrl("RESOURCEMANAGER")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("FALCON")
 +            .addTag("url").addText(driver.getRealUrl("FALCON")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("STORM")
 +            .addTag("url").addText(driver.getRealUrl("STORM")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("STORM-LOGVIEWER")
 +            .addTag("url").addText(driver.getRealUrl("STORM-LOGVIEWER")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("SOLR")
 +            .addTag("url").addText(driver.getRealUrl("SOLR")).gotoParent()
 +        .addTag("service")
 +            .addTag("role").addText("KAFKA")
 +            .addTag("url").addText(driver.getRealUrl("KAFKA")).gotoParent()
 +        .addTag("service")
 +        .addTag("role").addText("SERVICE-TEST")
 +        .gotoRoot();
 +//     System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicJsonUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicJsonUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    /* Create a directory.
 +    curl -i -X PUT "http://<HOST>:<PORT>/<PATH>?op=MKDIRS[&permission=<OCTAL>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir" )
 +        .queryParam( "op", "MKDIRS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-success.json" ) )
 +        .contentType( "application/json" );
 +    Cookie cookie = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "MKDIRS" )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
-         .header( "Set-Cookie", containsString( "JSESSIONID" ) )
-         .header( "Set-Cookie", containsString( "HttpOnly" ) )
 +        .contentType( "application/json" )
 +        .content( "boolean", is( true ) )
 +        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + root + "/dir" ).getDetailedCookie( "JSESSIONID" );
 +    assertThat( cookie.isSecured(), is( true ) );
++    assertThat( cookie.isHttpOnly(), is( true ) );
 +    assertThat( cookie.getPath(), is( "/gateway/cluster" ) );
 +    assertThat( cookie.getValue().length(), greaterThan( 16 ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicOutboundHeaderUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicOutboundHeaderUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +    String gatewayHostName = gatewayAddress.getHostName();
 +    String gatewayAddrName = InetAddress.getByName(gatewayHostName).getHostAddress();
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .header( "Host", driver.getRealAddr( "WEBHDFS" ) )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header("Location", driver.getRealUrl("DATANODE") + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs");
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    String location = response.getHeader( "Location" );
 +    //System.out.println( location );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +      MatcherAssert.assertThat( location, containsString( "?_=" ) );
 +    }
 +    MatcherAssert.assertThat(location, not(containsString("host=")));
 +    MatcherAssert.assertThat(location, not(containsString("port=")));
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicOutboundEncodedHeaderUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicOutboundHeaderUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +    String gatewayHostName = gatewayAddress.getHostName();
 +    String gatewayAddrName = InetAddress.getByName(gatewayHostName).getHostAddress();
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/fileレポー" )
 +        .header( "Host", driver.getRealAddr( "WEBHDFS" ) )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header("Location", driver.getRealUrl("DATANODE") + "/v1" + root + "/dir/file%E3%83%AC%E3%83%9D%E3%83%BC?op=CREATE&user.name=hdfs");
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/fileレポー" );
 +//        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file%E3%83%AC%E3%83%9D%E3%83%BC" );
 +    String location = response.getHeader( "Location" );
 +    //System.out.println( location );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, containsString("/dir/file%E3%83%AC%E3%83%9D%E3%83%BC") );
 +    }
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHdfsTildeUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testHdfsTildeUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +
 +    // Attempt to delete the test directory in case a previous run failed.
 +    // Ignore any result.
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "DELETE" )
 +        .from( "testHdfsTildeUseCase" )
 +        .pathInfo( "/v1/user/hdfs" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
-     given()
-         //.log().all()
-         .auth().preemptive().basic( username, password )
-         .header("X-XSRF-Header", "jksdhfkhdsf")
-         .queryParam( "op", "DELETE" )
-         .queryParam( "recursive", "true" )
-         .expect()
-         //.log().all()
-         .statusCode( HttpStatus.SC_OK )
-         .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
-     driver.assertComplete();
 +
-     driver.getMock( "WEBHDFS" )
-         .expect()
-         .method( "PUT" )
-         .pathInfo( "/v1/user/hdfs/dir" )
-         .queryParam( "op", "MKDIRS" )
-         .queryParam( "user.name", username )
-         .respond()
-         .status( HttpStatus.SC_OK )
-         .content( driver.getResourceBytes( "webhdfs-success.json" ) )
-         .contentType("application/json");
-     given()
-         //.log().all()
-         .auth().preemptive().basic( username, password )
-         .header("X-XSRF-Header", "jksdhfkhdsf")
-         .queryParam( "op", "MKDIRS" )
-         .expect()
-         //.log().all();
-         .statusCode( HttpStatus.SC_OK )
-         .contentType( "application/json" )
-         .content( "boolean", is( true ) )
-         .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
-     driver.assertComplete();
++    try {
++      // Need to turn off URL encoding here or otherwise the tilde gets encoded and the rewrite rules fail
++      RestAssured.urlEncodingEnabled = false;
++      given()
++          //.log().all()
++          .auth().preemptive().basic( username, password )
++          .header("X-XSRF-Header", "jksdhfkhdsf")
++          .queryParam( "op", "DELETE" )
++          .queryParam( "recursive", "true" )
++          .expect()
++          //.log().all()
++          .statusCode( HttpStatus.SC_OK )
++          .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
++      driver.assertComplete();
++
++      driver.getMock( "WEBHDFS" )
++          .expect()
++          .method( "PUT" )
++          .pathInfo( "/v1/user/hdfs/dir" )
++          .queryParam( "op", "MKDIRS" )
++          .queryParam( "user.name", username )
++          .respond()
++          .status( HttpStatus.SC_OK )
++          .content( driver.getResourceBytes( "webhdfs-success.json" ) )
++          .contentType("application/json");
++      given()
++          //.log().all()
++          .auth().preemptive().basic( username, password )
++          .header("X-XSRF-Header", "jksdhfkhdsf")
++          .queryParam( "op", "MKDIRS" )
++          .expect()
++          //.log().all();
++          .statusCode( HttpStatus.SC_OK )
++          .contentType( "application/json" )
++          .content( "boolean", is( true ) )
++          .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
++      driver.assertComplete();
++    } finally {
++      RestAssured.urlEncodingEnabled = true;
++    }
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicHdfsUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testBasicHdfsUseCase";
 +    String username = "hdfs";
 +    String password = "hdfs-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +    String gatewayHostName = gatewayAddress.getHostName();
 +    String gatewayAddrName = InetAddress.getByName( gatewayHostName ).getHostAddress();
 +
 +    // Attempt to delete the test directory in case a previous run failed.
 +    // Ignore any result.
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "DELETE" )
 +        .from( "testBasicHdfsUseCase-1" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "recursive", "true" )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
 +    driver.assertComplete();
 +
 +    /* Create a directory.
 +    curl -i -X PUT "http://<HOST>:<PORT>/<PATH>?op=MKDIRS[&permission=<OCTAL>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 HttpStatus.SC_OK OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir" )
 +        .queryParam( "op", "MKDIRS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-success.json" ) )
 +        .contentType( "application/json" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "MKDIRS" )
 +        .expect()
 +        //.log().all();
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/json" )
 +        .content( "boolean", is( true ) )
 +        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + root + "/dir" );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "LISTSTATUS" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "webhdfs-liststatus-test.json" ) )
 +        .contentType( "application/json" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .content( "FileStatuses.FileStatus[0].pathSuffix", is( "dir" ) )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a bad password.
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, "invalid-password" )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a bad user.
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( "hdfs-user", "hdfs-password" )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "LISTSTATUS" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +
 +    //NEGATIVE: Test a valid but unauthorized user.
 +    given()
 +      //.log().all()
 +      .auth().preemptive().basic( "mapred-user", "mapred-password" )
 +      .header("X-XSRF-Header", "jksdhfkhdsf")
 +      .queryParam( "op", "LISTSTATUS" )
 +      .expect()
 +      //.log().ifError()
 +      .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +      .when().get( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +
 +    /* Add a file.
 +    curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE
 +                       [&overwrite=<true|false>][&blocksize=<LONG>][&replication=<SHORT>]
 +                     [&permission=<OCTAL>][&buffersize=<INT>]"
 +
 +    The request is redirected to a datanode where the file data is to be written:
 +    HTTP/1.1 307 TEMPORARY_REDIRECT
 +    Location: http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=CREATE...
 +    Content-Length: 0
 +
 +    Step 2: Submit another HTTP PUT request using the URL in the Location header with the file data to be written.
 +    curl -i -X PUT -T <LOCAL_FILE> "http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=CREATE..."
 +
 +    The client receives an HttpStatus.SC_CREATED Created response with zero content length and the WebHDFS URI of the file in the Location header:
 +    HTTP/1.1 HttpStatus.SC_CREATED Created
 +    Location: webhdfs://<HOST>:<PORT>/<PATH>
 +    Content-Length: 0
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header( "Location", driver.getRealUrl( "DATANODE" ) + "/v1" + root + "/dir/file?op=CREATE&user.name=hdfs" );
 +    driver.getMock( "DATANODE" )
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "CREATE" )
 +        .queryParam( "user.name", username )
 +        .contentType( "text/plain" )
 +        .content( driver.getResourceBytes( "test.txt" ) )
 +            //.content( driver.gerResourceBytes( "hadoop-examples.jar" ) )
 +        .respond()
 +        .status( HttpStatus.SC_CREATED )
 +        .header( "Location", "webhdfs://" + driver.getRealAddr( "DATANODE" ) + "/v1" + root + "/dir/file" );
 +    Response response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "CREATE" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .when().put( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    String location = response.getHeader( "Location" );
 +    log.debug( "Redirect location: " + response.getHeader( "Location" ) );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +      MatcherAssert.assertThat( location, containsString( "?_=" ) );
 +    }
 +    MatcherAssert.assertThat( location, not( containsString( "host=" ) ) );
 +    MatcherAssert.assertThat( location, not( containsString( "port=" ) ) );
 +    response = given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "test.txt" ) )
 +        .contentType( "text/plain" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_CREATED )
 +        .when().put( location );
 +    location = response.getHeader( "Location" );
 +    log.debug( "Created location: " + location );
 +    if( driver.isUseGateway() ) {
 +      MatcherAssert.assertThat( location, anyOf(
 +          startsWith( "http://" + gatewayHostName + ":" + gatewayAddress.getPort() + "/" ),
 +          startsWith( "http://" + gatewayAddrName + ":" + gatewayAddress.getPort() + "/" ) ) );
 +    }
 +    driver.assertComplete();
 +
 +    /* Get the file.
 +    curl -i -L "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=OPEN
 +                       [&offset=<LONG>][&length=<LONG>][&buffersize=<INT>]"
 +
 +    The request is redirected to a datanode where the file data can be read:
 +    HTTP/1.1 307 TEMPORARY_REDIRECT
 +    Location: http://<DATANODE>:<PORT>/webhdfs/v1/<PATH>?op=OPEN...
 +    Content-Length: 0
 +
 +    The client follows the redirect to the datanode and receives the file data:
 +    HTTP/1.1 200 OK
 +    Content-Type: application/octet-stream
 +    Content-Length: 22
 +
 +    Hello, webhdfs user!
 +    */
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "OPEN" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_TEMPORARY_REDIRECT )
 +        .header( "Location", driver.getRealUrl( "DATANODE" ) + "/v1" + root + "/dir/file?op=OPEN&user.name=hdfs" );
 +    driver.getMock( "DATANODE" )
 +        .expect()
 +        .method( "GET" )
 +        .pathInfo( "/v1" + root + "/dir/file" )
 +        .queryParam( "op", "OPEN" )
 +        .queryParam( "user.name", username )
 +        .respond()
 +        .status( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .content( driver.getResourceBytes( "test.txt" ) );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "OPEN" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .content( is( "TEST" ) )
 +        .when().get( driver.getUrl("WEBHDFS") + "/v1" + root + "/dir/file" );
 +    driver.assertComplete();
 +
 +    /* Delete the directory.
 +    curl -i -X DELETE "http://<host>:<port>/webhdfs/v1/<path>?op=DELETE
 +                                 [&recursive=<true|false>]"
 +
 +    The client receives a response with a boolean JSON object:
 +    HTTP/1.1 200 OK
 +    Content-Type: application/json
 +    Transfer-Encoding: chunked
 +
 +    {"boolean": true}
 +    */
 +    // Mock the interaction with the namenode.
 +    driver.getMock( "WEBHDFS" )
 +        .expect()
 +        .from( "testBasicHdfsUseCase-1" )
 +        .method( "DELETE" )
 +        .pathInfo( "/v1" + root )
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "user.name", username )
 +        .queryParam( "recursive", "true" )
 +        .respond()
 +        .status( HttpStatus.SC_OK );
 +    given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .queryParam( "op", "DELETE" )
 +        .queryParam( "recursive", "true" )
 +        .expect()
 +        //.log().ifError()
 +        .statusCode( HttpStatus.SC_OK )
 +        .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1" + root );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
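
For reference, the two-step CREATE flow that the WEBHDFS and DATANODE mocks above emulate can also be driven outside the test harness with plain java.net.HttpURLConnection. This is only a sketch: the gateway address, the "sandbox" topology, the file path and the credentials are assumptions and would have to match an actual deployment (including any TLS setup).

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class WebHdfsCreateSketch {
      public static void main( String[] args ) throws Exception {
        // Assumed gateway address, topology and credentials; adjust for a real deployment.
        String base = "http://localhost:8888/gateway/sandbox/webhdfs/v1";
        String auth = "Basic " + Base64.getEncoder().encodeToString(
            "hdfs:hdfs-password".getBytes( StandardCharsets.UTF_8 ) );

        // Step 1: PUT ...?op=CREATE with no body; the gateway answers with a 307 whose
        // Location header points back through the gateway instead of exposing the datanode.
        HttpURLConnection step1 = (HttpURLConnection) new URL( base + "/tmp/example/file?op=CREATE" ).openConnection();
        step1.setRequestMethod( "PUT" );
        step1.setRequestProperty( "Authorization", auth );
        step1.setRequestProperty( "X-XSRF-Header", "anything" );
        step1.setInstanceFollowRedirects( false );
        System.out.println( "Redirect status: " + step1.getResponseCode() );
        String location = step1.getHeaderField( "Location" );

        // Step 2: PUT the file content to the redirect location; expect 201 Created.
        HttpURLConnection step2 = (HttpURLConnection) new URL( location ).openConnection();
        step2.setRequestMethod( "PUT" );
        step2.setRequestProperty( "Authorization", auth );
        step2.setRequestProperty( "X-XSRF-Header", "anything" );
        step2.setRequestProperty( "Content-Type", "text/plain" );
        step2.setDoOutput( true );
        try( OutputStream out = step2.getOutputStream() ) {
          out.write( "TEST".getBytes( StandardCharsets.UTF_8 ) );
        }
        System.out.println( "Create status: " + step2.getResponseCode() );
      }
    }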
 +
 +  // User hdfs in groups hadoop, hdfs
 +  // User mapred in groups hadoop, mapred
 +  // User hcat in group hcat
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testPmHdfsM1UseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testPmHdfdM1UseCase";
 +    String userA = "hdfs";
 +    String passA = "hdfs-password";
 +    String userB = "mapred";
 +    String passB = "mapred-password";
 +    String userC = "hcat";
 +    String passC = "hcat-password";
 +    String groupA = "hdfs";
 +    String groupB = "mapred";
 +    String groupAB = "hadoop";
 +    String groupC = "hcat";
 +
 +    deleteFile( userA, passA, root, "true", 200 );
 +
 +    createDir( userA, passA, groupA, root + "/dirA700", "700", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA770", "770", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA707", "707", 200, 200 );
 +    createDir( userA, passA, groupA, root + "/dirA777", "777", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB700", "700", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB770", "770", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB707", "707", 200, 200 );
 +    createDir( userA, passA, groupAB, root + "/dirAB777", "777", 200, 200 );
 +
 +    // CREATE: Files
 +    // userA:groupA
 +    createFile( userA, passA, groupA, root + "/dirA700/fileA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA770/fileA770", "770", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA707/fileA707", "707", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupA, root + "/dirA777/fileA777", "777", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userA:groupAB
 +    createFile( userA, passA, groupAB, root + "/dirAB700/fileAB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB770/fileAB770", "770", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB707/fileAB707", "707", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userA, passA, groupAB, root + "/dirAB777/fileAB777", "777", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userB:groupB
 +    createFile( userB, passB, groupB, root + "/dirA700/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupB, root + "/dirA770/fileB700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +//kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
 +//    createFile( userB, passB, groupB, root + "/dirA707/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//    createFile( userB, passB, groupB, root + "/dirA777/fileB700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//kam]
 +    // userB:groupAB
 +    createFile( userB, passB, groupAB, root + "/dirA700/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupAB, root + "/dirA770/fileBA700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userB, passB, groupAB, root + "/dirA707/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    createFile( userB, passB, groupAB, root + "/dirA777/fileBA700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +    // userC:groupC
 +    createFile( userC, passC, groupC, root + "/dirA700/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +    createFile( userC, passC, groupC, root + "/dirA770/fileC700", "700", "text/plain", "small1.txt", 307, 403, 0 );
 +//kam:20130219[ chmod seems to be broken at least in Sandbox 1.2
 +//    createFile( userC, passC, groupC, root + "/dirA707/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//    createFile( userC, passC, groupC, root + "/dirA777/fileC700", "700", "text/plain", "small1.txt", 307, 201, 200 );
 +//kam]
 +
 +    // READ
 +    // userA
 +    readFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userA, passA, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userB:groupB
 +    readFile( userB, passB, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userB, passB, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userB:groupAB
 +    readFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB707/fileAB707", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userB, passB, root + "/dirAB777/fileAB777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    // userC:groupC
 +    readFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userC, passC, root + "/dirA770/fileA770", "text/plain", "small1.txt", HttpStatus.SC_FORBIDDEN );
 +    readFile( userC, passC, root + "/dirA707/fileA707", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +    readFile( userC, passC, root + "/dirA777/fileA777", "text/plain", "small1.txt", HttpStatus.SC_OK );
 +
 +    //NEGATIVE: Test a bad password.
 +    if( driver.isUseGateway() ) {
 +      given()
 +          //.log().all()
 +          .auth().preemptive().basic( userA, "invalid-password" )
 +          .header("X-XSRF-Header", "jksdhfkhdsf")
 +          .queryParam( "op", "OPEN" )
 +          .expect()
 +          //.log().all()
 +          .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +          .when().get( driver.getUrl("WEBHDFS") + "/v1" + root + "/dirA700/fileA700" );
 +    }
 +    driver.assertComplete();
 +
 +    // UPDATE (Negative First)
 +    updateFile( userC, passC, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB700/fileAB700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB770/fileAB700", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userB, passB, root + "/dirAB770/fileAB770", "text/plain", "small2.txt", 307, 403 );
 +    updateFile( userA, passA, root + "/dirA700/fileA700", "text/plain", "small2.txt", 307, 201 );
 +
 +    // DELETE (Negative First)
 +    deleteFile( userC, passC, root + "/dirA700/fileA700", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userB, passB, root + "/dirAB700/fileAB700", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userB, passB, root + "/dirAB770/fileAB770", "false", HttpStatus.SC_FORBIDDEN );
 +    deleteFile( userA, passA, root + "/dirA700/fileA700", "false", HttpStatus.SC_OK );
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( userA, passA, root, "true", HttpStatus.SC_OK );
 +    LOG_EXIT();
 +  }
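
The 200/403 expectations in the matrix above follow from the POSIX-style permission bits on the parent directory and the caller's owner/group relationship to it. A small self-contained sketch of that rule (a hypothetical helper, not part of the test class):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class HdfsAccessSketch {
      /** Returns 200 if the caller may write into a directory with the given octal mode, else 403. */
      static int expectedWriteStatus( String octalMode, String dirOwner, String dirGroup,
                                      String user, Set<String> userGroups ) {
        int mode = Integer.parseInt( octalMode, 8 );
        int bits;
        if( user.equals( dirOwner ) ) {
          bits = ( mode >> 6 ) & 7;   // owner bits
        } else if( userGroups.contains( dirGroup ) ) {
          bits = ( mode >> 3 ) & 7;   // group bits
        } else {
          bits = mode & 7;            // other bits
        }
        return ( bits & 2 ) != 0 ? 200 : 403;  // the write bit decides create/delete
      }

      public static void main( String[] args ) {
        Set<String> mapredGroups = new HashSet<>( Arrays.asList( "hadoop", "mapred" ) );
        // mapred writing into hdfs-owned dirA700 -> 403, as asserted above
        System.out.println( expectedWriteStatus( "700", "hdfs", "hdfs", "mapred", mapredGroups ) );
        // mapred writing into dirA707 (others have rwx) -> 200
        System.out.println( expectedWriteStatus( "707", "hdfs", "hdfs", "mapred", mapredGroups ) );
      }
    }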
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testJavaMapReduceViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testJavaMapReduceViaWebHCat";
 +    String user = "mapred";
 +    String pass = "mapred-password";
 +    String group = "mapred";
 +//    String user = "hcat";
 +//    String pass = "hcat-password";
 +//    String group = "hcat";
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +
 +    /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
 +    curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, null, root+"/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 +
 +    /* Put the data file into HDFS (changes.txt)
 +    curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, null, root+"/input/changes.txt", "777", "text/plain", "changes.txt", 307, 201, 200 );
 +
 +    /* Create the output directory
 +    curl -X PUT 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/output?op=MKDIRS&user.name=hdfs'
 +    */
 +    createDir( user, pass, null, root+"/output", "777", 200, 200 );
 +
 +    /* Submit the job
 +    curl -d user.name=hdfs -d jar=wordcount/hadoop-examples.jar -d class=org.apache.hadoop.examples.WordCount -d arg=wordcount/input -d arg=wordcount/output 'http://localhost:8888/org.apache.knox.gateway/cluster/templeton/v1/mapreduce/jar'
 +    {"id":"job_201210301335_0059"}
 +    */
 +    String job = submitJava(
 +        user, pass,
 +        root+"/hadoop-examples.jar", "org.apache.org.apache.hadoop.examples.WordCount",
 +        root+"/input", root+"/output",
 +        200 );
 +
 +    /* Get the job status
 +    curl 'http://vm:50111/templeton/v1/queue/:jobid?user.name=hdfs'
 +    */
 +    queryQueue( user, pass, job );
 +
 +    // Can't really check for the output here because the job won't be done.
 +    /* Retrieve results
 +    curl 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input?op=LISTSTATUS'
 +    */
 +
 +    if( CLEANUP_TEST ) {
 +      // Cleanup anything that might have been leftover because the test failed previously.
 +      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +    }
 +    LOG_EXIT();
 +  }
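
Under the covers, submitJava presumably issues a form-encoded POST against WebHCat's /templeton/v1/mapreduce/jar resource, as in the curl example above. A rough standalone equivalent (the gateway address, topology, HDFS paths and credentials are placeholders, not values from this test):

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;
    import java.util.Scanner;

    public class WebHCatSubmitSketch {
      public static void main( String[] args ) throws Exception {
        String gateway = "http://localhost:8888/gateway/sandbox";   // assumed gateway + topology
        String creds = Base64.getEncoder().encodeToString(
            "mapred:mapred-password".getBytes( StandardCharsets.UTF_8 ) );

        // Build the application/x-www-form-urlencoded body: jar, class and two arg parameters.
        String body = "user.name=" + enc( "mapred" )
            + "&jar=" + enc( "/tmp/wordcount/hadoop-examples.jar" )
            + "&class=" + enc( "org.apache.hadoop.examples.WordCount" )
            + "&arg=" + enc( "/tmp/wordcount/input" )
            + "&arg=" + enc( "/tmp/wordcount/output" );

        HttpURLConnection conn = (HttpURLConnection) new URL( gateway + "/templeton/v1/mapreduce/jar" ).openConnection();
        conn.setRequestMethod( "POST" );
        conn.setRequestProperty( "Authorization", "Basic " + creds );
        conn.setRequestProperty( "X-XSRF-Header", "anything" );
        conn.setRequestProperty( "Content-Type", "application/x-www-form-urlencoded" );
        conn.setDoOutput( true );
        try( OutputStream out = conn.getOutputStream() ) {
          out.write( body.getBytes( StandardCharsets.UTF_8 ) );
        }
        // On success WebHCat returns a JSON body such as {"id":"job_201210301335_0059"}.
        try( Scanner in = new Scanner( conn.getInputStream(), "UTF-8" ) ) {
          System.out.println( in.useDelimiter( "\\A" ).next() );
        }
      }

      private static String enc( String s ) throws Exception {
        return URLEncoder.encode( s, "UTF-8" );
      }
    }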
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testPigViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayWebHCatFuncTest/testPigViaWebHCat";
 +    String user = "mapred";
 +    String pass = "mapred-password";
 +    String group = "mapred";
 +
 +    // Cleanup if previous run failed.
 +    deleteFile( user, pass, root, "true", 200, 404 );
 +
 +    // Post the data to HDFS
 +    createFile( user, pass, null, root + "/passwd.txt", "777", "text/plain", "passwd.txt", 307, 201, 200 );
 +
 +    // Post the script to HDFS
 +    createFile( user, pass, null, root+"/script.pig", "777", "text/plain", "script.pig", 307, 201, 200 );
 +
 +    // Create the output directory
 +    createDir( user, pass, null, root + "/output", "777", 200, 200 );
 +
 +    // Submit the job
 +    submitPig( user, pass, group, root + "/script.pig", "-v", root + "/output", 200 );
 +
 +    // Check job status (if possible)
 +    // Check output (if possible)
 +
 +    // Cleanup
 +    deleteFile( user, pass, root, "true", 200 );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHiveViaWebHCat() throws IOException {
 +    LOG_ENTER();
 +    String user = "hive";
 +    String pass = "hive-password";
 +    String group = "hive";
 +    String root = "/tmp/GatewayWebHCatFuncTest/testHiveViaWebHCat";
 +
 +    // Cleanup if previous run failed.
 +    deleteFile( user, pass, root, "true", 200, 404 );
 +
 +    // Post the data to HDFS
 +
 +    // Post the script to HDFS
 +    createFile(user, pass, null, root + "/script.hive", "777", "text/plain", "script.hive", 307, 201, 200);
 +
 +    // Submit the job
 +    submitHive(user, pass, group, root + "/script.hive", root + "/output", 200);
 +
 +    // Check job status (if possible)
 +    // Check output (if possible)
 +
 +    // Cleanup
 +    deleteFile( user, pass, root, "true", 200 );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testOozieJobSubmission() throws Exception {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayBasicFuncTest/testOozieJobSubmission";
 +    String user = "hdfs";
 +    String pass = "hdfs-password";
 +    String group = "hdfs";
 +
 +    // Cleanup anything that might have been leftover because the test failed previously.
 +    deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +
 +    /* Put the workflow definition into HDFS */
 +    createFile( user, pass, group, root+"/workflow.xml", "666", "application/octet-stream", "oozie-workflow.xml", 307, 201, 200 );
 +
 +    /* Put the mapreduce code into HDFS. (hadoop-examples.jar)
 +    curl -X PUT --data-binary @hadoop-examples.jar 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/hadoop-examples.jar?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, group, root+"/lib/hadoop-examples.jar", "777", "application/octet-stream", findHadoopExamplesJar(), 307, 201, 200 );
 +
 +    /* Put the data file into HDFS (changes.txt)
 +    curl -X PUT --data-binary @changes.txt 'http://192.168.1.163:8888/org.apache.knox.gateway/cluster/webhdfs/v1/user/hdfs/wordcount/input/changes.txt?user.name=hdfs&op=CREATE'
 +     */
 +    createFile( user, pass, group, root+"/input/changes.txt", "666", "text/plain", "changes.txt", 307, 201, 200 );
 +
 +    VelocityEngine velocity = new VelocityEngine();
 +    velocity.setProperty( RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS, "org.apache.velocity.runtime.log.NullLogSystem" );
 +    velocity.setProperty( RuntimeConstants.RESOURCE_LOADER, "classpath" );
 +    velocity.setProperty( "classpath.resource.loader.class", ClasspathResourceLoader.class.getName() );
 +    velocity.init();
 +
 +    VelocityContext context = new VelocityContext();
 +    context.put( "userName", user );
 +    context.put( "nameNode", "hdfs://sandbox:8020" );
 +    context.put( "jobTracker", "sandbox:50300" );
 +    //context.put( "appPath", "hdfs://sandbox:8020" + root );
 +    context.put( "appPath", root );
 +    context.put( "inputDir", root + "/input" );
 +    context.put( "outputDir", root + "/output" );
 +
 +    //URL url = TestUtils.getResourceUrl( GatewayBasicFuncTest.class, "oozie-jobs-submit-request.xml" );
 +    //String name = url.toExternalForm();
 +    String name = TestUtils.getResourceName( this.getClass(), "oozie-jobs-submit-request.xml" );
 +    Template template = velocity.getTemplate( name );
 +    StringWriter sw = new StringWriter();
 +    template.merge( context, sw );
 +    String request = sw.toString();
 +    //System.out.println( "REQUEST=" + request );
 +
 +    /* Submit the job via Oozie. */
 +    String id = oozieSubmitJob( user, pass, request, 201 );
 +    //System.out.println( "ID=" + id );
 +
 +    String success = "SUCCEEDED";
 +    String status = "UNKNOWN";
 +    long delay = 1000 * 1; // 1 second.
 +    long limit = 1000 * 60; // 60 seconds.
 +    long start = System.currentTimeMillis();
 +    while( System.currentTimeMillis() <= start+limit ) {
 +      status = oozieQueryJobStatus( user, pass, id, 200 );
 +      //System.out.println( "Status=" + status );
 +      if( success.equalsIgnoreCase( status ) ) {
 +        break;
 +      } else {
 +        //System.out.println( "Status=" + status );
 +        Thread.sleep( delay );
 +      }
 +    }
 +    //System.out.println( "Status is " + status + " after " + ((System.currentTimeMillis()-start)/1000) + " seconds." );
 +    MatcherAssert.assertThat( status, is( success ) );
 +
 +    if( CLEANUP_TEST ) {
 +      // Cleanup anything that might have been leftover because the test failed previously.
 +      deleteFile( user, pass, root, "true", HttpStatus.SC_OK );
 +    }
 +    LOG_EXIT();
 +  }
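
The status loop above is a plain poll-until-done pattern. Pulled out into a reusable form (purely a sketch, with a stand-in status supplier in place of oozieQueryJobStatus), it could read:

    import java.util.concurrent.Callable;

    public class PollSketch {
      /** Polls the supplier until it returns the expected value or the time limit expires. */
      static String pollForStatus( Callable<String> statusSupplier, String expected,
                                   long delayMillis, long limitMillis ) throws Exception {
        long start = System.currentTimeMillis();
        String status = "UNKNOWN";
        while( System.currentTimeMillis() <= start + limitMillis ) {
          status = statusSupplier.call();
          if( expected.equalsIgnoreCase( status ) ) {
            break;
          }
          Thread.sleep( delayMillis );
        }
        return status;
      }

      public static void main( String[] args ) throws Exception {
        // Stand-in supplier that "succeeds" after a few calls; a real caller would query Oozie.
        final int[] calls = { 0 };
        String result = pollForStatus(
            () -> ++calls[ 0 ] < 3 ? "RUNNING" : "SUCCEEDED",
            "SUCCEEDED", 100, 10000 );
        System.out.println( result );  // prints SUCCEEDED
      }
    }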
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testBasicHiveJDBCUseCase() throws IOException {
 +    LOG_ENTER();
 +    String root = "/tmp/GatewayHiveJDBCFuncTest/testBasicHiveUseCase";
 +    String username = "hive";
 +    String password = "hive-password";
 +    InetSocketAddress gatewayAddress = driver.gateway.getAddresses()[0];
 +
 +    // This use case emulates a simple JDBC scenario that consists of the following steps:
 +    // -open a connection;
 +    // -configure Hive using 'execute' statements (this also includes execution of 'close operation' requests internally);
 +    // -execute a 'create table' command;
 +    // -execute a 'select from table' command;
 +    // Data insertion is omitted because it would cause a lot of additional commands during insertion/querying.
 +    // All binary data was captured during a real scenario and stored in files as byte arrays.
 +
 +    // open session
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/open-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/open-session-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    Response response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/open-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/open-session-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/open-session-result.bin" ) ) );
 +
 +    driver.assertComplete();
 +
 +    // execute 'set hive.fetch.output.serde=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-fetch-output-serde-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.fetch.output.serde=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-1-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-1-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-1-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.server2.http.path=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-http-path-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-http-path-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-server2-http-path-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.server2.http.path=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-2-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-2-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-2-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.server2.servermode=...' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-servermode-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-set-server2-servermode-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-server2-servermode-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.server2.servermode=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-3-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-3-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-3-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'set hive.security.authorization.enabled=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-set-security-authorization-enabled-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'set hive.security.authorization.enabled=...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-4-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-4-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-4-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'create table...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-create-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-create-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-create-table-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'create table...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-5-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-5-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-5-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'select * from...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/execute-select-from-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/execute-select-from-table-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/execute-select-from-table-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'GetResultSetMetadata' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/get-result-set-metadata-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/get-result-set-metadata-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/get-result-set-metadata-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // execute 'FetchResults' (called internally by the JDBC driver)
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/fetch-results-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/fetch-results-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/fetch-results-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/fetch-results-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/fetch-results-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close operation for execute 'select * from...'
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-operation-6-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-operation-6-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-operation-6-result.bin" ) ) );
 +    driver.assertComplete();
 +
 +    // close session
 +    driver.getMock( "HIVE" )
 +        .expect()
 +        .method( "POST" )
 +        .content( driver.getResourceBytes( "hive/close-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .respond()
 +        .characterEncoding( "UTF-8" )
 +        .status( HttpStatus.SC_OK )
 +        .content( driver.getResourceBytes( "hive/close-session-result.bin" ) )
 +        .contentType( "application/x-thrift" );
 +    response = given()
 +        .auth().preemptive().basic( username, password )
 +        .header("X-XSRF-Header", "jksdhfkhdsf")
 +        .content( driver.getResourceBytes( "hive/close-session-request.bin" ) )
 +        .contentType( "application/x-thrift" )
 +        .expect()
 +        .statusCode( HttpStatus.SC_OK )
 +        //.content( is( driver.getResourceBytes( "hive/close-session-result.bin" ) ) )
 +        .contentType( "application/x-thrift" )
 +        .when().post( driver.getUrl( "HIVE" ) );
 +    assertThat( response.body().asByteArray(), is( driver.getResourceBytes( "hive/close-session-result.bin" ) ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
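
The recorded .bin request/response pairs replayed above correspond to what the Hive JDBC driver exchanges with HiveServer2 in HTTP transport mode. For orientation only, here is a client-side sketch; the JDBC URL shape, the "sandbox" topology, the SSL settings and the credentials are assumptions that depend on the deployment, and the hive-jdbc driver must be on the classpath.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcSketch {
      public static void main( String[] args ) throws Exception {
        // Assumed URL shape for HiveServer2 HTTP transport behind the gateway; adjust host,
        // port, topology and SSL/truststore settings for a real installation.
        String url = "jdbc:hive2://localhost:8443/;ssl=true;transportMode=http;httpPath=gateway/sandbox/hive";
        try( Connection connection = DriverManager.getConnection( url, "hive", "hive-password" );
             Statement statement = connection.createStatement() ) {
          // Mirrors the recorded scenario: create a table, then select from it.
          statement.execute( "CREATE TABLE IF NOT EXISTS test_table (col1 STRING, col2 STRING)" );
          try( ResultSet rs = statement.executeQuery( "SELECT * FROM test_table" ) ) {
            while( rs.next() ) {
              System.out.println( rs.getString( 1 ) + "\t" + rs.getString( 2 ) );
            }
          }
        }
      }
    }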
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseGetTableList() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-list";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", ContentType.XML.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() );
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +
 +    MatcherAssert
 +        .assertThat(
 +            the( response.getBody().asString() ),
 +            isEquivalentTo( the( driver.getResourceString( resourceName + ".xml", UTF8 ) ) ) );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() );
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +
 +    MatcherAssert
 +    .assertThat( response.getBody().asString(), sameJSONAs( driver.getResourceString( resourceName + ".json", UTF8 ) ) );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( "/" )
 +    .header( "Accept", "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceString( resourceName + ".protobuf", UTF8 ), UTF8 )
 +    .contentType( "application/x-protobuf" );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", "application/x-protobuf" )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( "application/x-protobuf" )
 +    .content( is( driver.getResourceString( resourceName + ".protobuf", UTF8 ) ) )
 +    .when().get( driver.getUrl( "WEBHBASE" ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
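
Outside the mock framework, the table-list call above is just a GET on the WEBHBASE root resource with an Accept header; the XML, JSON and protobuf variants differ only in content negotiation. A minimal sketch with assumed gateway address, topology and credentials:

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;
    import java.util.Scanner;

    public class HBaseTableListSketch {
      public static void main( String[] args ) throws Exception {
        String url = "http://localhost:8888/gateway/sandbox/hbase/";   // assumed WEBHBASE URL
        String creds = Base64.getEncoder().encodeToString(
            "hbase:hbase-password".getBytes( StandardCharsets.UTF_8 ) );

        HttpURLConnection conn = (HttpURLConnection) new URL( url ).openConnection();
        conn.setRequestMethod( "GET" );
        conn.setRequestProperty( "Authorization", "Basic " + creds );
        conn.setRequestProperty( "X-XSRF-Header", "anything" );
        conn.setRequestProperty( "Accept", "application/json" );      // or an XML/protobuf type

        System.out.println( "Status: " + conn.getResponseCode() );
        try( Scanner in = new Scanner( conn.getInputStream(), "UTF-8" ) ) {
          System.out.println( in.useDelimiter( "\\A" ).next() );      // the table list body
        }
      }
    }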
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseCreateTableAndVerifySchema() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-schema";
 +    String path = "/table/schema";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status( HttpStatus.SC_CREATED )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .header( "Location", driver.getRealUrl( "WEBHBASE" ) + path  );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode( HttpStatus.SC_CREATED )
 +    .contentType( ContentType.XML )
 +    .header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
 +    .when().put(driver.getUrl("WEBHBASE") + path);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status(HttpStatus.SC_CREATED)
 +    .content(driver.getResourceBytes(resourceName + ".json"))
 +    .contentType(ContentType.JSON.toString())
 +    .header("Location", driver.getRealUrl("WEBHBASE") + path);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode( HttpStatus.SC_CREATED )
 +    .contentType( ContentType.JSON )
 +    .header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + path ) )
 +    .when().put( driver.getUrl( "WEBHBASE" ) + path );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( path )
 +    .respond()
 +    .status( HttpStatus.SC_CREATED )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .header("Location", driver.getRealUrl("WEBHBASE") + path);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode(HttpStatus.SC_CREATED)
 +    .contentType("application/x-protobuf")
 +    .header("Location", startsWith(driver.getUrl("WEBHBASE") + path))
 +    .when().put(driver.getUrl("WEBHBASE") + path);
 +    driver.assertComplete();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseGetTableSchema() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +    String resourceName = "hbase/table-metadata";
 +    String path = "/table/schema";
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( path )
 +    .header("Accept", ContentType.XML.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".xml"))
 +    .contentType(ContentType.XML.toString());
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +
 +    MatcherAssert
 +        .assertThat(
 +            the(response.getBody().asString()),
 +            isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("GET")
 +    .pathInfo(path)
 +    .header("Accept", ContentType.JSON.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".json"))
 +    .contentType(ContentType.JSON.toString());
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.JSON.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.JSON )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +
 +    MatcherAssert
 +    .assertThat(response.getBody().asString(), sameJSONAs(driver.getResourceString(resourceName + ".json", UTF8)));
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( path )
 +    .header( "Accept", "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType("application/x-protobuf");
 +
 +    response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", "application/x-protobuf" )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    //.content( is( driver.getResourceBytes( resourceName + ".protobuf" ) ) )
 +    .contentType( "application/x-protobuf" )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + path );
 +    // RestAssured seems to be screwing up the binary comparison so do it explicitly.
 +    assertThat( driver.getResourceBytes( resourceName + ".protobuf" ), is( response.body().asByteArray() ) );
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseInsertDataIntoTable() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String resourceName = "hbase/table-data";
 +    String singleRowPath = "/table/testrow";
 +    String multipleRowPath = "/table/false-row-key";
 +
 +    //PUT request
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put(driver.getUrl("WEBHBASE") + multipleRowPath);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "PUT" )
 +    .pathInfo( singleRowPath )
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .contentType( ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put(driver.getUrl("WEBHBASE") + singleRowPath);
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("PUT")
 +    .pathInfo(multipleRowPath)
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .contentType("application/x-protobuf")
 +    .content(driver.getResourceBytes(resourceName + ".protobuf"))
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().put( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
 +    driver.assertComplete();
 +
 +    //POST request
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", ContentType.XML.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +    .contentType( ContentType.XML.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +      .auth().preemptive().basic( username, password )
 +      .header("X-XSRF-Header", "jksdhfkhdsf")
 +      //.header( "Content-Type", ContentType.XML.toString() )
 +      .content( driver.getResourceBytes( resourceName + ".xml" ) )
 +      .contentType( ContentType.XML.toString() )
 +      .expect()
 +      .statusCode( HttpStatus.SC_OK )
 +      .when().post( driver.getUrl( "WEBHBASE" ) + multipleRowPath );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( singleRowPath )
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .contentType( ContentType.JSON.toString() )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", ContentType.JSON.toString() )
 +    .content( driver.getResourceBytes( resourceName + ".json" ) )
 +    .contentType( ContentType.JSON.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().post( driver.getUrl( "WEBHBASE" ) + singleRowPath );
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "POST" )
 +    .pathInfo( multipleRowPath )
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    //.header( "Content-Type", "application/x-protobuf" )
 +    .content( driver.getResourceBytes( resourceName + ".protobuf" ) )
 +    .contentType( "application/x-protobuf" )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().post(driver.getUrl("WEBHBASE") + multipleRowPath);
 +    driver.assertComplete();
 +    LOG_EXIT();
 +  }
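
The table-data resources PUT and POSTed above use the HBase REST CellSet representation, in which row keys, column qualifiers and cell values are base64 encoded. A hedged sketch of building such an XML body for a single cell (the table, row and column names here are placeholders):

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class HBaseCellSetSketch {
      static String b64( String s ) {
        return Base64.getEncoder().encodeToString( s.getBytes( StandardCharsets.UTF_8 ) );
      }

      public static void main( String[] args ) {
        // One row ("testrow") with one cell in column "family:col" holding "some value".
        String cellSet =
            "<CellSet>" +
              "<Row key=\"" + b64( "testrow" ) + "\">" +
                "<Cell column=\"" + b64( "family:col" ) + "\">" + b64( "some value" ) + "</Cell>" +
              "</Row>" +
            "</CellSet>";
        System.out.println( cellSet );
        // Such a body would be PUT or POSTed to the WEBHBASE row resource with an XML
        // content type, matching the mock interactions above.
      }
    }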
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseDeleteDataFromTable() {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String tableId = "table";
 +    String rowId = "row";
 +    String familyId = "family";
 +    String columnId = "column";
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-1")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId)
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId);
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-2")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId + "/" + familyId)
 +    .respond()
 +    .status( HttpStatus.SC_OK );
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId);
 +    driver.assertComplete();
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .from("testHBaseDeleteDataFromTable-3")
 +    .method("DELETE")
 +    .pathInfo("/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId)
 +    .respond()
 +    .status(HttpStatus.SC_OK);
 +
 +    given()
 +    .auth().preemptive().basic(username, password)
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .when().delete(driver.getUrl("WEBHBASE") + "/" + tableId + "/" + rowId + "/" + familyId + ":" + columnId);
 +    driver.assertComplete();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testHBaseQueryTableData() throws IOException {
 +    LOG_ENTER();
 +    String username = "hbase";
 +    String password = "hbase-password";
 +
 +    String resourceName = "hbase/table-data";
 +
 +    String allRowsPath = "/table/*";
 +    String rowsStartsWithPath = "/table/row*";
 +    String rowsWithKeyPath = "/table/row";
 +    String rowsWithKeyAndColumnPath = "/table/row/family:col";
 +
 +    driver.getMock("WEBHBASE")
 +    .expect()
 +    .method("GET")
 +    .pathInfo(allRowsPath)
 +    .header("Accept", ContentType.XML.toString())
 +    .respond()
 +    .status(HttpStatus.SC_OK)
 +    .content(driver.getResourceBytes(resourceName + ".xml"))
 +    .contentType(ContentType.XML.toString());
 +
 +    Response response = given()
 +    .auth().preemptive().basic( username, password )
 +    .header("X-XSRF-Header", "jksdhfkhdsf")
 +    .header( "Accept", ContentType.XML.toString() )
 +    .expect()
 +    .statusCode( HttpStatus.SC_OK )
 +    .contentType( ContentType.XML )
 +    .when().get( driver.getUrl( "WEBHBASE" ) + allRowsPath );
 +
 +    MatcherAssert
 +    .assertThat(
 +        the(response.getBody().asString()),
 +        isEquivalentTo(the(driver.getResourceString(resourceName + ".xml", UTF8))));
 +    driver.assertComplete();
 +
 +    driver.getMock( "WEBHBASE" )
 +    .expect()
 +    .method( "GET" )
 +    .pathInfo( rowsStartsWithPath

<TRUNCATED>

[03/11] knox git commit: KNOX-1005 - WebHbase get rid of extra decoding

Posted by mo...@apache.org.
KNOX-1005 - WebHbase get rid of extra decoding


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/751f7648
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/751f7648
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/751f7648

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 751f7648982dff7ffbaf1dbb1cb1a02fb831d797
Parents: 5c95ab4
Author: Sandeep More <mo...@apache.org>
Authored: Tue Sep 12 17:56:23 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Tue Sep 12 17:56:23 2017 -0400

----------------------------------------------------------------------
 .../src/main/resources/services/hbase/0.98.0/service.xml         | 2 +-
 .../hadoop/gateway/service/definition/ServiceDefinitionTest.java | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/751f7648/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
index 181b536..28896b5 100644
--- a/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
+++ b/gateway-service-definitions/src/main/resources/services/hbase/0.98.0/service.xml
@@ -29,7 +29,7 @@
             <rewrite apply="WEBHBASE/webhbase/regions/outbound" to="response.body"/>
         </route>
     </routes>
-    <dispatch classname="org.apache.hadoop.gateway.hbase.HBaseDispatch"/>
+
     <testURLs>
         <testURL>/hbase/version</testURL>
         <testURL>/hbase/version/cluster</testURL>

http://git-wip-us.apache.org/repos/asf/knox/blob/751f7648/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java b/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
index 436e83d..4e8c39c 100644
--- a/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
+++ b/gateway-service-definitions/src/test/java/org/apache/hadoop/gateway/service/definition/ServiceDefinitionTest.java
@@ -78,8 +78,8 @@ public class ServiceDefinitionTest {
     assertNotNull(routes.get(0).getPath());
     url = ClassLoader.getSystemResource("services/hbase/0.98.0/service.xml");
     definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
-    assertNotNull(definition.getDispatch());
-    assertEquals("org.apache.hadoop.gateway.hbase.HBaseDispatch", definition.getDispatch().getClassName());
+    assertNotNull(definition.getName());
+    assertEquals("webhbase", definition.getName());
     url = ClassLoader.getSystemResource("services/webhdfs/2.4.0/service.xml");
     definition = (ServiceDefinition) unmarshaller.unmarshal(url.openStream());
     assertNotNull(definition.getDispatch());
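
The change above drops the custom HBase dispatch from the service definition, and the
commit title points at an extra round of URL decoding as the reason. A minimal,
hypothetical illustration of why decoding a request path a second time corrupts
percent-encoded WebHBase row keys (this is an assumption about the failure mode, not
code taken from the removed HBaseDispatch class):

    import java.net.URLDecoder;

    public class DoubleDecodeExample {
      public static void main(String[] args) throws Exception {
        // A client that wants a literal "%20" inside a row key percent-encodes it once.
        String requestPath = "/table/row%2520a/family";

        String once  = URLDecoder.decode(requestPath, "UTF-8"); // "/table/row%20a/family"  - intended key
        String twice = URLDecoder.decode(once, "UTF-8");        // "/table/row a/family"    - wrong key
        System.out.println(once);
        System.out.println(twice);
      }
    }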


[10/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
KNOX-998 - Merging from master


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2e375843
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2e375843
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2e375843

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 2e3758434a9afb1fa6b4a02df991b59cbf7be8dc
Parents: 582cc7e
Author: Sandeep More <mo...@apache.org>
Authored: Wed Sep 13 09:42:23 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Sep 13 09:42:23 2017 -0400

----------------------------------------------------------------------
 .../security/token/impl/JWTTokenTest.java       | 213 -------------------
 .../security/token/impl/JWTTokenTest.java       | 213 +++++++++++++++++++
 2 files changed, 213 insertions(+), 213 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/2e375843/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
deleted file mode 100644
index ef4023d..0000000
--- a/gateway-spi/src/test/java/org/apache/hadoop/gateway/services/security/token/impl/JWTTokenTest.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.services.security.token.impl;
-
-import java.security.KeyPair;
-import java.security.KeyPairGenerator;
-import java.security.NoSuchAlgorithmException;
-import java.security.interfaces.RSAPrivateKey;
-import java.security.interfaces.RSAPublicKey;
-import java.util.ArrayList;
-
-import org.junit.Test;
-
-import com.nimbusds.jose.JWSAlgorithm;
-import com.nimbusds.jose.JWSSigner;
-import com.nimbusds.jose.JWSVerifier;
-import com.nimbusds.jose.crypto.RSASSASigner;
-import com.nimbusds.jose.crypto.RSASSAVerifier;
-
-public class JWTTokenTest extends org.junit.Assert {
-  private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
-  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
-  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
-
-  private RSAPublicKey publicKey;
-  private RSAPrivateKey privateKey;
-
-  public JWTTokenTest() throws Exception, NoSuchAlgorithmException {
-    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
-    kpg.initialize(2048);
-
-    KeyPair kp = kpg.genKeyPair();
-    publicKey = (RSAPublicKey) kp.getPublic();
-    privateKey = (RSAPrivateKey) kp.getPrivate();
-  }
-
-  public void testTokenParsing() throws Exception {
-    JWTToken token = JWTToken.parseToken(JWT_TOKEN);
-    assertEquals(token.getHeader(), HEADER);
-    assertEquals(token.getClaims(), CLAIMS);
-
-    assertEquals(token.getIssuer(), "gateway");
-    assertEquals(token.getPrincipal(), "john.doe@example.com");
-    assertEquals(token.getAudience(), "https://login.example.com");
-    assertEquals(token.getExpires(), "1363360913");
-  }
-
-  @Test
-  public void testTokenCreation() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "https://login.example.com";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListSingle() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(1, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListMultiple() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-    audiences.add("KNOXSSO");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(2, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithAudienceListCombined() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "LJM";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = new ArrayList<String>();
-    audiences.add("https://login.example.com");
-    audiences.add("KNOXSSO");
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertEquals(3, token.getAudienceClaims().length);
-  }
-
-  @Test
-  public void testTokenCreationWithNullAudienceList() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = null;
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    ArrayList<String> audiences = null;
-
-    JWTToken token = new JWTToken("RS256", claims, audiences);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals(null, token.getAudience());
-    assertArrayEquals(null, token.getAudienceClaims());
-  }
-
-  @Test
-  public void testTokenCreationRS512() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "https://login.example.com";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
-  }
-
-  @Test
-  public void testTokenSignature() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "https://login.example.com";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken("RS256", claims);
-
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-
-    // Sign the token
-    JWSSigner signer = new RSASSASigner(privateKey);
-    token.sign(signer);
-    assertTrue(token.getSignaturePayload().length > 0);
-
-    // Verify the signature
-    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
-    assertTrue(token.verify(verifier));
-  }
-
-  @Test
-  public void testTokenSignatureRS512() throws Exception {
-    String[] claims = new String[4];
-    claims[0] = "KNOXSSO";
-    claims[1] = "john.doe@example.com";
-    claims[2] = "https://login.example.com";
-    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
-    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
-
-    assertEquals("KNOXSSO", token.getIssuer());
-    assertEquals("john.doe@example.com", token.getSubject());
-    assertEquals("https://login.example.com", token.getAudience());
-    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
-
-    // Sign the token
-    JWSSigner signer = new RSASSASigner(privateKey);
-    token.sign(signer);
-    assertTrue(token.getSignaturePayload().length > 0);
-
-    // Verify the signature
-    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
-    assertTrue(token.verify(verifier));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/2e375843/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
----------------------------------------------------------------------
diff --git a/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
new file mode 100644
index 0000000..3ce28c5
--- /dev/null
+++ b/gateway-spi/src/test/java/org/apache/knox/gateway/services/security/token/impl/JWTTokenTest.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.security.token.impl;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+
+import org.junit.Test;
+
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
+public class JWTTokenTest extends org.junit.Assert {
+  private static final String JWT_TOKEN = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE0MTY5MjkxMDksImp0aSI6ImFhN2Y4ZDBhOTVjIiwic2NvcGVzIjpbInJlcG8iLCJwdWJsaWNfcmVwbyJdfQ.XCEwpBGvOLma4TCoh36FU7XhUbcskygS81HE1uHLf0E";
+  private static final String HEADER = "{\"alg\":\"RS256\", \"type\":\"JWT\"}";
+  private static final String CLAIMS = "{\"iss\": \"gateway\", \"prn\": \"john.doe@example.com\", \"aud\": \"https://login.example.com\", \"exp\": \"1363360913\"}";
+
+  private RSAPublicKey publicKey;
+  private RSAPrivateKey privateKey;
+
+  public JWTTokenTest() throws Exception, NoSuchAlgorithmException {
+    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+    kpg.initialize(2048);
+
+    KeyPair kp = kpg.genKeyPair();
+    publicKey = (RSAPublicKey) kp.getPublic();
+    privateKey = (RSAPrivateKey) kp.getPrivate();
+  }
+
+  public void testTokenParsing() throws Exception {
+    JWTToken token = JWTToken.parseToken(JWT_TOKEN);
+    assertEquals(token.getHeader(), HEADER);
+    assertEquals(token.getClaims(), CLAIMS);
+
+    assertEquals(token.getIssuer(), "gateway");
+    assertEquals(token.getPrincipal(), "john.doe@example.com");
+    assertEquals(token.getAudience(), "https://login.example.com");
+    assertEquals(token.getExpires(), "1363360913");
+  }
+
+  @Test
+  public void testTokenCreation() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListSingle() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(1, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListMultiple() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+    audiences.add("KNOXSSO");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(2, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithAudienceListCombined() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "LJM";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = new ArrayList<String>();
+    audiences.add("https://login.example.com");
+    audiences.add("KNOXSSO");
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertEquals(3, token.getAudienceClaims().length);
+  }
+
+  @Test
+  public void testTokenCreationWithNullAudienceList() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = null;
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    ArrayList<String> audiences = null;
+
+    JWTToken token = new JWTToken("RS256", claims, audiences);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals(null, token.getAudience());
+    assertArrayEquals(null, token.getAudienceClaims());
+  }
+
+  @Test
+  public void testTokenCreationRS512() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
+  }
+
+  @Test
+  public void testTokenSignature() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken("RS256", claims);
+
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+
+    // Sign the token
+    JWSSigner signer = new RSASSASigner(privateKey);
+    token.sign(signer);
+    assertTrue(token.getSignaturePayload().length > 0);
+
+    // Verify the signature
+    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
+    assertTrue(token.verify(verifier));
+  }
+
+  @Test
+  public void testTokenSignatureRS512() throws Exception {
+    String[] claims = new String[4];
+    claims[0] = "KNOXSSO";
+    claims[1] = "john.doe@example.com";
+    claims[2] = "https://login.example.com";
+    claims[3] = Long.toString( ( System.currentTimeMillis()/1000 ) + 300);
+    JWTToken token = new JWTToken(JWSAlgorithm.RS512.getName(), claims);
+
+    assertEquals("KNOXSSO", token.getIssuer());
+    assertEquals("john.doe@example.com", token.getSubject());
+    assertEquals("https://login.example.com", token.getAudience());
+    assertTrue(token.getHeader().contains(JWSAlgorithm.RS512.getName()));
+
+    // Sign the token
+    JWSSigner signer = new RSASSASigner(privateKey);
+    token.sign(signer);
+    assertTrue(token.getSignaturePayload().length > 0);
+
+    // Verify the signature
+    JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) publicKey);
+    assertTrue(token.verify(verifier));
+  }
+
+}
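
The sign and verify assertions above exercise the Nimbus JOSE + JWT types the test
imports (JWSAlgorithm, RSASSASigner, RSASSAVerifier). A minimal standalone sketch of
the same round trip using Nimbus directly, assuming nimbus-jose-jwt 4.x or later on
the classpath; this only illustrates what the JWTToken calls delegate to and is not
the JWTToken implementation itself:

    import java.security.KeyPair;
    import java.security.KeyPairGenerator;
    import java.security.interfaces.RSAPrivateKey;
    import java.security.interfaces.RSAPublicKey;
    import java.util.Date;

    import com.nimbusds.jose.JWSAlgorithm;
    import com.nimbusds.jose.JWSHeader;
    import com.nimbusds.jose.crypto.RSASSASigner;
    import com.nimbusds.jose.crypto.RSASSAVerifier;
    import com.nimbusds.jwt.JWTClaimsSet;
    import com.nimbusds.jwt.SignedJWT;

    public class NimbusRoundTrip {
      public static void main(String[] args) throws Exception {
        KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
        kpg.initialize(2048);
        KeyPair kp = kpg.genKeyPair();

        // Claims comparable to the ones the tests pass to the JWTToken constructor.
        JWTClaimsSet claims = new JWTClaimsSet.Builder()
            .issuer("KNOXSSO")
            .subject("john.doe@example.com")
            .audience("https://login.example.com")
            .expirationTime(new Date(System.currentTimeMillis() + 300 * 1000))
            .build();

        SignedJWT jwt = new SignedJWT(new JWSHeader(JWSAlgorithm.RS256), claims);
        jwt.sign(new RSASSASigner((RSAPrivateKey) kp.getPrivate())); // like token.sign(signer)
        String serialized = jwt.serialize();                         // header.payload.signature

        SignedJWT parsed = SignedJWT.parse(serialized);              // like JWTToken.parseToken(...)
        boolean valid = parsed.verify(new RSASSAVerifier((RSAPublicKey) kp.getPublic()));
        System.out.println(valid + " issuer=" + parsed.getJWTClaimsSet().getIssuer());
      }
    }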


[02/11] knox git commit: KNOX-1004 Failing (flaky) Knox unit tests

Posted by mo...@apache.org.
KNOX-1004 Failing (flaky) Knox unit tests

Signed-off-by: Colm O hEigeartaigh <co...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5c95ab4e
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5c95ab4e
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5c95ab4e

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 5c95ab4e31ee547807e83e4b2d972b4d33da9eb2
Parents: 773ac9d
Author: Denes Bodo <bo...@gmail.com>
Authored: Mon Sep 11 15:15:02 2017 +0200
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Mon Sep 11 14:30:14 2017 +0100

----------------------------------------------------------------------
 .../org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java   | 4 +++-
 .../test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java  | 4 +++-
 .../test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java  | 4 +++-
 3 files changed, 9 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5c95ab4e/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
index a55a519..e1b6d2b 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
@@ -94,7 +94,9 @@ public class AmbariServiceDefinitionTest {
   @After
   public void cleanupTest() throws Exception {
     FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-    FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
+    // Test run should not fail if deleting deployment files is not successful.
+    // Deletion has already been done by TopologyService.
+    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
   }
 
   public static void setupGateway() throws Exception {

http://git-wip-us.apache.org/repos/asf/knox/blob/5c95ab4e/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
index a78506d..cd7a9fe 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
@@ -99,7 +99,9 @@ public class GatewayAppFuncTest {
   @After
   public void cleanupTest() throws Exception {
     FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-    FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
+    // Test run should not fail if deleting deployment files is not successful.
+    // Deletion has already been done by TopologyService.
+    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
   }
 
   public static void setupGateway() throws Exception {

http://git-wip-us.apache.org/repos/asf/knox/blob/5c95ab4e/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java
index 92c5d06..86a411d 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySslFuncTest.java
@@ -119,7 +119,9 @@ public class GatewaySslFuncTest {
   @After
   public void cleanupTest() throws Exception {
     FileUtils.cleanDirectory( new File( config.getGatewayTopologyDir() ) );
-    FileUtils.cleanDirectory( new File( config.getGatewayDeploymentDir() ) );
+    // Test run should not fail if deleting deployment files is not successful.
+    // Deletion has already been done by TopologyService.
+    FileUtils.deleteQuietly( new File( config.getGatewayDeploymentDir() ) );
   }
 
   public static void setupGateway() throws Exception {
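
The fix swaps org.apache.commons.io.FileUtils.cleanDirectory, which throws an
IOException when the directory is missing or an entry cannot be deleted, for
FileUtils.deleteQuietly, which swallows any failure and only reports a boolean. A
minimal sketch of that contrast with commons-io, assuming a hypothetical
"target/deployments" path standing in for the gateway deployment directory:

    import java.io.File;
    import java.io.IOException;

    import org.apache.commons.io.FileUtils;

    public class CleanupContrast {
      public static void main(String[] args) {
        File deployDir = new File("target/deployments"); // hypothetical path

        try {
          // Throws if the directory is gone or an entry cannot be deleted, which is
          // what failed the @After methods once TopologyService had already removed
          // the deployed files.
          FileUtils.cleanDirectory(deployDir);
        } catch (IOException e) {
          System.out.println("cleanDirectory failed: " + e.getMessage());
        }

        // Never throws; returns false if nothing could be deleted.
        boolean deleted = FileUtils.deleteQuietly(deployDir);
        System.out.println("deleteQuietly returned " + deleted);
      }
    }

Note that deleteQuietly removes the directory itself rather than just emptying it;
that is harmless as long as the setup method recreates it, as the setup methods shown
later in this thread do with deployDir.mkdirs().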


[06/11] knox git commit: KNOX-998 - Merging from master

Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
index 1f6496b,0000000..e9442cd
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayDeployFuncTest.java
@@@ -1,315 -1,0 +1,315 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
- import com.jayway.restassured.response.Response;
++import io.restassured.response.Response;
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.After;
 +import org.junit.AfterClass;
 +import org.junit.Before;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.FilenameFilter;
 +import java.io.IOException;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +import java.util.regex.Pattern;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.containsString;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.hamcrest.Matchers.greaterThan;
 +import static org.junit.Assert.assertThat;
 +
 +@Category(ReleaseTest.class)
 +public class GatewayDeployFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayDeployFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static File gatewayHome;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  @Before
 +  public void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +    gatewayHome = gatewayDir;
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +  }
 +
 +  @After
 +  public void cleanupGateway() throws Exception {
 +    gateway.stop();
 +    FileUtils.deleteQuietly( gatewayHome );
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  //@Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.LONG_TIMEOUT )
 +  public void testDeployRedeployUndeploy() throws InterruptedException, IOException {
 +    LOG_ENTER();
 +    long sleep = 200;
 +    int numFilesInWebInf = 4; // # files in WEB-INF (ie gateway.xml, rewrite.xml, shiro.ini, web.xml)
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    long topoTimestampBefore, topoTimestampAfter;
 +
 +    File topoDir = new File( config.getGatewayTopologyDir() );
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    File earDir;
 +
 +    // Make sure deployment directory is empty.
 +    assertThat( topoDir.listFiles().length, is( 0 ) );
 +    assertThat( deployDir.listFiles().length, is( 0 ) );
 +
 +    File descriptor = writeTestTopology( "test-cluster", createTopology() );
 +    long writeTime = System.currentTimeMillis();
 +
 +    earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 1, 0, sleep );
 +    File warDir = new File( earDir, "%2F" );
 +    File webInfDir = new File( warDir, "WEB-INF" );
 +    waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
 +    waitForAccess( serviceUrl, username, password, sleep );
 +
 +    // Wait to make sure a second has passed to ensure the file timestamps are different.
 +    waitForElapsed( writeTime, 1000, 100 );
 +
 +    // Redeploy and make sure the timestamp is updated.
 +    topoTimestampBefore = descriptor.lastModified();
 +    GatewayServer.redeployTopologies( null );
 +    writeTime = System.currentTimeMillis();
 +    topoTimestampAfter = descriptor.lastModified();
 +    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
 +
 +    // Check to make sure there are two war directories with the same root.
 +    earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 2, 1, sleep );
 +    warDir = new File( earDir, "%2F" );
 +    webInfDir = new File( warDir, "WEB-INF" );
 +    waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
 +    waitForAccess( serviceUrl, username, password, sleep );
 +
 +    // Wait to make sure a second has passed to ensure the file timestamps are different.
 +    waitForElapsed( writeTime, 1000, 100 );
 +
 +    // Redeploy and make sure the timestamp is updated.
 +    topoTimestampBefore = descriptor.lastModified();
 +    GatewayServer.redeployTopologies( "test-cluster" );
 +    writeTime = System.currentTimeMillis();
 +    topoTimestampAfter = descriptor.lastModified();
 +    assertThat( topoTimestampAfter, greaterThan( topoTimestampBefore ) );
 +
 +    // Check to make sure there are two war directories with the same root.
 +    earDir = waitForFiles( deployDir, "test-cluster\\.topo\\.[0-9A-Fa-f]+", 3, 2, sleep );
 +    warDir = new File( earDir, "%2F" );
 +    webInfDir = new File( warDir, "WEB-INF" );
 +    waitForFiles( webInfDir, ".*", numFilesInWebInf, 0, sleep );
 +    waitForAccess( serviceUrl, username, password, sleep );
 +
 +    // Delete the test topology.
 +    assertThat( "Failed to delete the topology file.", descriptor.delete(), is( true ) );
 +
 +    // Wait to make sure a second has passed to ensure the file timestamps are different.
 +    waitForElapsed( writeTime, 1000, 100 );
 +
 +    waitForFiles( deployDir, ".*", 0, -1, sleep );
 +
 +    // Wait a bit more to make sure undeployment finished.
 +    Thread.sleep( sleep );
 +
 +    // Make sure the test topology is not accessible.
 +    given().auth().preemptive().basic( username, password )
 +        .expect().statusCode( HttpStatus.SC_NOT_FOUND )
 +        .when().get( serviceUrl );
 +
 +    // Make sure deployment directory is empty.
 +    assertThat( topoDir.listFiles().length, is( 0 ) );
 +    assertThat( deployDir.listFiles().length, is( 0 ) );
 +    LOG_EXIT();
 +  }
 +
 +  private void waitForElapsed( long from, long total, long sleep ) throws InterruptedException {
 +    while( System.currentTimeMillis() - from < total ) {
 +      Thread.sleep( sleep );
 +    }
 +  }
 +
 +  private File writeTestTopology( String name, XMLTag xml ) throws IOException {
 +    // Create the test topology.
 +    File tempFile = new File( config.getGatewayTopologyDir(), name + ".xml." + UUID.randomUUID() );
 +    FileOutputStream stream = new FileOutputStream( tempFile );
 +    xml.toStream( stream );
 +    stream.close();
 +    File descriptor = new File( config.getGatewayTopologyDir(), name + ".xml" );
 +    tempFile.renameTo( descriptor );
 +    return descriptor;
 +  }
 +
 +  private File waitForFiles( File dir, String pattern, int count, int index, long sleep ) throws InterruptedException {
 +    RegexDirFilter filter = new RegexDirFilter( pattern );
 +    while( true ) {
 +      File[] files = dir.listFiles( filter );
 +      if( files.length == count ) {
 +        return ( index < 0 ) ? null : files[ index ];
 +      }
 +      Thread.sleep( sleep );
 +    }
 +  }
 +
 +  private void waitForAccess( String url, String username, String password, long sleep ) throws InterruptedException {
 +    while( true ) {
 +      Response response = given()
 +          .auth().preemptive().basic( username, password )
 +          .when().get( url ).andReturn();
 +      if( response.getStatusCode() == HttpStatus.SC_NOT_FOUND ) {
 +        Thread.sleep( sleep );
 +        continue;
 +      }
 +      assertThat( response.getContentType(), containsString( "text/plain" ) );
 +      assertThat( response.getBody().asString(), is( "test-service-response" ) );
 +      break;
 +    }
 +  }
 +
 +  private class RegexDirFilter implements FilenameFilter {
 +
 +    Pattern pattern;
 +
 +    RegexDirFilter( String regex ) {
 +      pattern = Pattern.compile( regex );
 +    }
 +
 +    @Override
 +    public boolean accept( File dir, String name ) {
 +      return pattern.matcher( name ).matches();
 +    }
 +  }
 +
 +}
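
Besides the package move from org.apache.hadoop.gateway to org.apache.knox.gateway,
the merge switches the REST Assured imports from com.jayway.restassured to
io.restassured, the package name used by REST Assured 3.x and later (the library
version is an assumption; only the package change itself is visible in the diff). The
fluent call sites stay the same, for example against a hypothetical gateway URL:

    import static io.restassured.RestAssured.given;

    import io.restassured.response.Response;

    public class RestAssuredImportExample {
      public static void main(String[] args) {
        // Hypothetical URL; the tests build theirs from the running gateway's port.
        String serviceUrl = "http://localhost:8888/gateway/test-cluster/test-service-path/test-service-resource";

        Response response = given()
            .auth().preemptive().basic("guest", "guest-password")
            .when().get(serviceUrl)
            .andReturn();
        System.out.println(response.getStatusCode());
      }
    }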

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
index 607ef9a,0000000..a1d00b2
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayHealthFuncTest.java
@@@ -1,205 -1,0 +1,205 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
- import com.jayway.restassured.path.json.JsonPath;
++import io.restassured.path.json.JsonPath;
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.Assert;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import javax.ws.rs.core.MediaType;
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.nio.file.FileSystems;
 +import java.nio.file.Path;
 +import java.util.Arrays;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Map;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +public class GatewayHealthFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger(GatewayHealthFuncTest.class);
 +
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  public static SimpleLdapDirectoryServer ldap;
 +  public static TcpTransport ldapTransport;
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    TestUtils.LOG_ENTER();
 +    setupLdap();
 +    setupGateway();
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    TestUtils.LOG_ENTER();
 +    gateway.stop();
 +    ldap.stop(true);
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +  public static void setupLdap() throws Exception {
 +    String basedir = System.getProperty("basedir");
 +    if (basedir == null) {
 +      basedir = new File(".").getCanonicalPath();
 +    }
 +
 +    final Path path = FileSystems
 +        .getDefault().getPath(basedir, "/src/test/resources/users.ldif");
 +
 +    ldapTransport = new TcpTransport(0);
 +    ldap = new SimpleLdapDirectoryServer("dc=hadoop,dc=apache,dc=org", path.toFile(), ldapTransport);
 +    ldap.start();
 +    LOG.info("LDAP port = " + ldapTransport.getPort());
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File(System.getProperty("user.dir"), "target");
 +    File gatewayDir = new File(targetDir, "gateway-home-" + UUID.randomUUID());
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir(gatewayDir.getAbsolutePath());
 +
 +    File topoDir = new File(testConfig.getGatewayTopologyDir());
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File(testConfig.getGatewayDeploymentDir());
 +    deployDir.mkdirs();
 +
 +    File descriptor = new File(topoDir, "test-cluster.xml");
 +    FileOutputStream stream = new FileOutputStream(descriptor);
 +    createTopology().toStream(stream);
 +    stream.close();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String, String> options = new HashMap<String, String>();
 +    options.put("persist-master", "false");
 +    options.put("master", "password");
 +    try {
 +      srvcs.init(testConfig, options);
 +    } catch (ServiceLifecycleException e) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    gateway = GatewayServer.startGateway(testConfig, srvcs);
 +    MatcherAssert.assertThat("Failed to start gateway.", gateway, notNullValue());
 +
 +    LOG.info("Gateway port = " + gateway.getAddresses()[0].getPort());
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument(true)
 +        .addRoot("topology")
 +        .addTag("gateway")
 +        .addTag("provider")
 +        .addTag("role").addText("authentication")
 +        .addTag("name").addText("ShiroProvider")
 +        .addTag("enabled").addText("true")
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm")
 +        .addTag("value").addText("org.apache.knox.gateway.shirorealm.KnoxLdapRealm").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.userDnTemplate")
 +        .addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.url")
 +        .addTag("value").addText("ldap://localhost:" + ldapTransport.getAcceptor().getLocalAddress().getPort()).gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
 +        .addTag("value").addText("simple").gotoParent()
 +        .addTag("param")
 +        .addTag("name").addText("urls./**")
 +        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
 +        .addTag("provider")
 +        .addTag("role").addText("identity-assertion")
 +        .addTag("enabled").addText("true")
 +        .addTag("name").addText("Default").gotoParent()
 +        .addTag("provider")
 +        .gotoRoot()
 +        .addTag("service")
 +        .addTag("role").addText("HEALTH")
 +        .gotoRoot();
 +    return xml;
 +  }
 +
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT)
 +  public void testPingResource() {
 +    TestUtils.LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/v1/ping";
 +    String body = given()
 +        .auth().preemptive().basic(username, password)
 +        .header("Accept", MediaType.TEXT_PLAIN)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(MediaType.TEXT_PLAIN)
 +        .when().get(serviceUrl).asString();
 +    Assert.assertEquals("OK", body.trim());
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +  @Test(timeout = TestUtils.MEDIUM_TIMEOUT)
 +  public void testMetricsResource() {
 +    TestUtils.LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/v1/metrics";
 +    String body = given()
 +        .auth().preemptive().basic(username, password)
 +        .expect()
 +        .statusCode(HttpStatus.SC_OK)
 +        .contentType(MediaType.APPLICATION_JSON)
 +        .when().get(serviceUrl).asString();
 +    String version = JsonPath.from(body).getString("version");
 +    Map<String, String> hm = JsonPath.from(body).getMap("");
 +    Assert.assertTrue(hm.size() >= 6);
 +    Assert.assertTrue(hm.keySet().containsAll(new HashSet<String>(Arrays.asList(new String[]{"timers", "histograms",
 +        "counters", "gauges", "version", "meters"}))));
 +    TestUtils.LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
index 6886b74,0000000..b66716d
mode 100755,000000..100755
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapDynamicGroupFuncTest.java
@@@ -1,287 -1,0 +1,287 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.net.URL;
 +import java.nio.file.FileSystems;
 +import java.nio.file.Path;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * Functional test to verify : looking up ldap groups from directory
 + * and using them in acl authorization checks
 + *
 + */
 +public class GatewayLdapDynamicGroupFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapDynamicGroupFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  public static String serviceUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    String basedir = System.getProperty("basedir");
 +    if (basedir == null) {
 +      basedir = new File(".").getCanonicalPath();
 +    }
 +    Path path = FileSystems.getDefault().getPath(basedir, "/src/test/resources/users-dynamic.ldif");
 +    driver.setupLdap( 0, path.toFile() );
 +    setupGateway();
 +    TestUtils.awaitNon404HttpStatus( new URL( serviceUrl ), 10000, 100 );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws IOException, Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +
 +    /*
 +    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
 +    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
 +    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
 +    System.setOut(new PrintStream(outContent));
 +    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "testdg-cluster"};
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    cli.run(argvals);
 +
 +    outContent.reset();
 +    String[] args1 = {"list-alias", "--cluster", "testdg-cluster", "--master", "hadoop"};
 +    cli = new KnoxCLI();
 +    cli.run(args1);
 +    System.err.println("ALIAS LIST: " + outContent.toString());
 +
 +    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
 +    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
 +    System.err.println("ALIAS value1: " + new String(passwordChars1));
 +    */
 +
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/testdg-cluster";
 +    serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +
 +    ///*
 +    GatewayServices services = GatewayServer.getGatewayServices();
 +    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
 +    aliasService.addAliasForCluster("testdg-cluster", "ldcSystemPassword", "guest-password");
 +
 +    char[] password1 = aliasService.getPasswordFromAliasForCluster( "testdg-cluster", "ldcSystemPassword");
 +    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
 +
 +    File descriptor = new File( topoDir, "testdg-cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
 +        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl())
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
 +        .addTag( "value" ).addText( "true" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
 +        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
 +        .addTag( "value" ).addText( "groupofurls" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
 +        .addTag( "value" ).addText( "memberurl" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
 +        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
 +        .addTag( "value" ).addText( "testdg-cluster" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
 +        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
 +        // .addTag( "value" ).addText( "guest-password" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "name" ).addText( "AclsAuthz" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
 +        .addTag( "value" ).addText( "*;directors;*" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +         // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  // @Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testGroupMember() throws ClassNotFoundException, Exception {
 +    LOG_ENTER();
 +    String username = "bob";
 +    String password = "bob-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testNonGroupMember() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_FORBIDDEN )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
index 435399a,0000000..60e3250
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapGroupFuncTest.java
@@@ -1,283 -1,0 +1,283 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.io.InputStream;
 +import java.net.URL;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
 +
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Ignore;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +
 +/**
 + * Functional test to verify looking up LDAP groups from the directory
 + * and using them in ACL authorization checks.
 + *
 + */
 +public class GatewayLdapGroupFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapGroupFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  public static String serviceUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    TestUtils.awaitNon404HttpStatus( new URL( serviceUrl ), 10000, 100 );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "true" );
 +    options.put( "master", "hadoop" );
 +
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +
 +    /*
 +    System.setProperty(GatewayConfig.GATEWAY_HOME_VAR, gatewayDir.getAbsolutePath());
 +    System.err.println("GH 10: " + System.getProperty(GatewayConfig.GATEWAY_HOME_VAR));
 +    ByteArrayOutputStream outContent = new ByteArrayOutputStream();
 +    System.setOut(new PrintStream(outContent));
 +    String[] argvals = {"create-alias", "ldcSystemPassword", "--value", "guest-password", "--master", "hadoop", "--cluster", "test-cluster"};
 +    KnoxCLI cli = new KnoxCLI();
 +    cli.setConf(new GatewayConfigImpl());
 +    cli.run(argvals);
 +
 +    outContent.reset();
 +    String[] args1 = {"list-alias", "--cluster", "test-cluster", "--master", "hadoop"};
 +    cli = new KnoxCLI();
 +    cli.run(args1);
 +    System.err.println("ALIAS LIST: " + outContent.toString());
 +
 +    AliasService as1 = cli.getGatewayServices().getService(GatewayServices.ALIAS_SERVICE);
 +    char[] passwordChars1 = as1.getPasswordFromAliasForCluster( "test-cluster", "ldcsystemPassword");
 +    System.err.println("ALIAS value1: " + new String(passwordChars1));
 +    */
 +
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +    serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +
 +    ///*
 +    GatewayServices services = GatewayServer.getGatewayServices();
 +    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
 +    aliasService.addAliasForCluster("test-cluster", "ldcSystemPassword", "guest-password");
 +
-     char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
++    // char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
 +    //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
 +
 +    File descriptor = new File( topoDir, "test-cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
 +        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl())
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
 +        .addTag( "value" ).addText( "true" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
 +        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
 +        .addTag( "value" ).addText( "groupofnames" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
 +        .addTag( "value" ).addText( "member" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
 +        .addTag( "value" ).addText( "test-cluster" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
 +        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
 +        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "name" ).addText( "AclsAuthz" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "test-service-role.acl" ) // FIXME[dilli]
 +        .addTag( "value" ).addText( "*;analyst;*" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +         // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  @Ignore
 +  // @Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testGroupMember() throws ClassNotFoundException, Exception {
 +    LOG_ENTER();
 +    String username = "sam";
 +    String password = "sam-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testNonGroupMember() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_FORBIDDEN )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
index 4ad76fb,0000000..f1dc3ad
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLdapPosixGroupFuncTest.java
@@@ -1,273 -1,0 +1,273 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.security.AliasService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Ignore;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.io.IOException;
 +import java.io.OutputStream;
 +import java.net.InetSocketAddress;
 +import java.net.URL;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +
 +/**
 + * Functional test to verify looking up LDAP groups from the directory
 + * and using them in ACL authorization checks.
 + *
 + */
 +@Category(ReleaseTest.class)
 +public class GatewayLdapPosixGroupFuncTest {
 +
 +  private static final Class<?> RESOURCE_BASE_CLASS = GatewayLdapPosixGroupFuncTest.class;
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayLdapPosixGroupFuncTest.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  public static String serviceUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    URL usersUrl = TestUtils.getResourceUrl( GatewayLdapPosixGroupFuncTest.class, "users.ldif" );
 +    driver.setupLdap( 0, new File( usersUrl.toURI() ) );
 +    setupGateway();
 +    TestUtils.awaitNon404HttpStatus( new URL( serviceUrl ), 10000, 100 );
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "true" );
 +    options.put( "master", "hadoop" );
 +
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/test-cluster";
 +    serviceUrl = clusterUrl + "/test-service-path/test-service-resource";
 +
 +    GatewayServices services = GatewayServer.getGatewayServices();
 +    AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
 +    aliasService.addAliasForCluster("test-cluster", "ldcSystemPassword", "guest-password");
 +
 +    char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
 +
 +    File descriptor = new File( topoDir, "test-cluster.xml" );
 +    OutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapGroupContextFactory" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory" )
 +        .addTag( "value" ).addText( "$ldapGroupContextFactory" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.authorizationEnabled" )
 +        .addTag( "value" ).addText( "true" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemAuthenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.searchBase" )
 +        .addTag( "value" ).addText( "ou=groups,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
 +        .addTag( "value" ).addText( "posixGroup" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
 +        .addTag( "value" ).addText( "memberUid" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
 +        .addTag( "value" ).addText( "uid={0}" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.clusterName" )
 +        .addTag( "value" ).addText( "test-cluster" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemUsername" )
 +        .addTag( "value" ).addText( "uid=guest,ou=people,dc=hadoop,dc=apache,dc=org" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.systemPassword" )
 +        .addTag( "value" ).addText( "S{ALIAS=ldcSystemPassword}" )
 +        .gotoParent().addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "authorization" )
 +        .addTag( "name" ).addText( "AclsAuthz" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "test-service-role.acl" )
 +        .addTag( "value" ).addText( "*;analyst;*" )
 +
 +        .gotoParent().gotoParent().addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-service-role" )
 +        .gotoRoot();
 +
 +    return xml;
 +  }
 +
 +  private static String getResourceName( String resource ) {
 +    return getResourceBaseName() + resource;
 +  }
 +
 +  private static String getResourceBaseName() {
 +    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
 +  }
 +
 +  @Ignore
 +  // @Test
 +  public void waitForManualTesting() throws IOException {
 +    System.in.read();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testGroupMember() throws ClassNotFoundException, Exception {
 +    LOG_ENTER();
 +    String username = "sam";
 +    String password = "sam-password";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testNonGroupMember() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    String username = "guest";
 +    String password = "guest-password";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_FORBIDDEN )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}
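
The three functional tests above exercise the same LDAP-group-to-ACL flow and differ mainly in three Shiro realm parameters: groupObjectClass, memberAttribute and memberAttributeValueTemplate (groupofurls/memberurl for dynamic groups, groupofnames/member for static groups, posixGroup/memberUid for POSIX groups). As a reading aid only, here is a condensed sketch of that shared configuration using the same XMLTag builder; the helper class and method are hypothetical and not part of these commits:

    import com.mycila.xmltool.XMLTag;

    final class LdapGroupRealmParamsSketch {

      // Appends the three group-lookup params that vary between the tests above.
      // Dynamic groups: "groupofurls",  "memberurl", "uid={0},ou=people,dc=hadoop,dc=apache,dc=org"
      // Static groups:  "groupofnames", "member",    "uid={0},ou=people,dc=hadoop,dc=apache,dc=org"
      // POSIX groups:   "posixGroup",   "memberUid", "uid={0}"
      static XMLTag addGroupLookupParams( XMLTag provider, String objectClass,
                                          String memberAttribute, String memberTemplate ) {
        return provider
            .addTag( "param" )
            .addTag( "name" ).addText( "main.ldapRealm.groupObjectClass" )
            .addTag( "value" ).addText( objectClass )
            .gotoParent().addTag( "param" )
            .addTag( "name" ).addText( "main.ldapRealm.memberAttribute" )
            .addTag( "value" ).addText( memberAttribute )
            .gotoParent().addTag( "param" )
            .addTag( "name" ).addText( "main.ldapRealm.memberAttributeValueTemplate" )
            .addTag( "value" ).addText( memberTemplate )
            .gotoParent();
      }
    }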

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
index 6fca9cc,0000000..4ade255
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayLocalServiceFuncTest.java
@@@ -1,185 -1,0 +1,185 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import com.mycila.xmltool.XMLDoc;
 +import com.mycila.xmltool.XMLTag;
 +import org.apache.commons.io.FileUtils;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.config.GatewayConfig;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.log.NoOpAppender;
 +import org.apache.http.HttpStatus;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
 +import java.io.File;
 +import java.io.FileOutputStream;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.UUID;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.is;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.junit.Assert.assertThat;
 +
 +public class GatewayLocalServiceFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayTestDriver.class );
 +
 +  public static Enumeration<Appender> appenders;
 +  public static GatewayConfig config;
 +  public static GatewayServer gateway;
 +  public static String gatewayUrl;
 +  public static String clusterUrl;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    FileUtils.deleteQuietly( new File( config.getGatewayConfDir() ) );
 +    FileUtils.deleteQuietly( new File( config.getGatewayDataDir() ) );
 +    NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    GatewayTestConfig testConfig = new GatewayTestConfig();
 +    config = testConfig;
 +    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    File topoDir = new File( testConfig.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    File descriptor = new File( topoDir, "cluster.xml" );
 +    FileOutputStream stream = new FileOutputStream( descriptor );
 +    createTopology().toStream( stream );
 +    stream.close();
 +
 +    DefaultGatewayServices srvcs = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      srvcs.init( testConfig, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    gateway = GatewayServer.startGateway( testConfig, srvcs );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    gatewayUrl = "http://localhost:" + gateway.getAddresses()[0].getPort() + "/" + config.getGatewayPath();
 +    clusterUrl = gatewayUrl + "/cluster";
 +  }
 +
 +  private static XMLTag createTopology() {
 +    XMLTag xml = XMLDoc.newDocument( true )
 +        .addRoot( "topology" )
 +        .addTag( "gateway" )
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "authentication" )
 +        .addTag( "name" ).addText( "ShiroProvider" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm" )
 +        .addTag( "value" ).addText( "org.apache.knox.gateway.shirorealm.KnoxLdapRealm" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.userDnTemplate" )
 +        .addTag( "value" ).addText( "uid={0},ou=people,dc=hadoop,dc=apache,dc=org" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.url" )
 +        .addTag( "value" ).addText( driver.getLdapUrl() ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "main.ldapRealm.contextFactory.authenticationMechanism" )
 +        .addTag( "value" ).addText( "simple" ).gotoParent()
 +        .addTag( "param" )
 +        .addTag( "name" ).addText( "urls./**" )
 +        .addTag( "value" ).addText( "authcBasic" ).gotoParent().gotoParent()
 +        .addTag( "provider" )
 +        .addTag( "role" ).addText( "identity-assertion" )
 +        .addTag( "enabled" ).addText( "true" )
 +        .addTag( "name" ).addText( "Default" ).gotoParent()
 +        .addTag( "provider" )
 +        .gotoRoot()
 +        .addTag( "service" )
 +        .addTag( "role" ).addText( "test-jersey-service-role" )
 +        .gotoRoot();
 +    // System.out.println( "GATEWAY=" + xml.toString() );
 +    return xml;
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testJerseyService() throws ClassNotFoundException {
 +    LOG_ENTER();
 +    assertThat( ClassLoader.getSystemClassLoader().loadClass( "org.glassfish.jersey.servlet.ServletContainer" ), notNullValue() );
 +    assertThat( ClassLoader.getSystemClassLoader().loadClass(
 +        "org.apache.knox.gateway.jersey.JerseyDispatchDeploymentContributor"), notNullValue() );
 +    assertThat( ClassLoader.getSystemClassLoader().loadClass(
 +        "org.apache.knox.gateway.jersey.JerseyServiceDeploymentContributorBase"), notNullValue() );
 +    assertThat( ClassLoader.getSystemClassLoader().loadClass(
 +        "org.apache.knox.gateway.TestJerseyService"), notNullValue() );
 +
 +    String username = "guest";
 +    String password = "guest-password";
 +    String serviceUrl = clusterUrl + "/test-jersey-service/test-jersey-resource-path";
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( username, password )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-jersey-resource-response" ) )
 +        .when().get( serviceUrl );
 +    LOG_EXIT();
 +  }
 +
 +}

http://git-wip-us.apache.org/repos/asf/knox/blob/582cc7e3/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
----------------------------------------------------------------------
diff --cc gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
index a89ac82,0000000..01fdd84
mode 100644,000000..100644
--- a/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/knox/gateway/GatewayMultiFuncTest.java
@@@ -1,443 -1,0 +1,443 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *     http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing, software
 + * distributed under the License is distributed on an "AS IS" BASIS,
 + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 + * See the License for the specific language governing permissions and
 + * limitations under the License.
 + */
 +package org.apache.knox.gateway;
 +
 +import java.io.File;
 +import java.net.URL;
 +import java.nio.charset.Charset;
 +import java.util.Enumeration;
 +import java.util.HashMap;
 +import java.util.Map;
 +import java.util.Properties;
 +import java.util.UUID;
 +
 +import org.apache.commons.io.FileUtils;
 +import org.apache.commons.io.IOUtils;
 +import org.apache.directory.server.protocol.shared.transport.TcpTransport;
 +import org.apache.knox.gateway.security.ldap.SimpleLdapDirectoryServer;
 +import org.apache.knox.gateway.services.DefaultGatewayServices;
 +import org.apache.knox.gateway.services.GatewayServices;
 +import org.apache.knox.gateway.services.ServiceLifecycleException;
 +import org.apache.knox.gateway.services.topology.TopologyService;
 +import org.apache.hadoop.test.TestUtils;
 +import org.apache.hadoop.test.category.ReleaseTest;
 +import org.apache.hadoop.test.mock.MockServer;
 +import org.apache.http.HttpHost;
 +import org.apache.http.HttpStatus;
 +import org.apache.http.auth.AuthScope;
 +import org.apache.http.auth.UsernamePasswordCredentials;
 +import org.apache.http.client.AuthCache;
 +import org.apache.http.client.CredentialsProvider;
 +import org.apache.http.client.methods.CloseableHttpResponse;
 +import org.apache.http.client.methods.HttpPut;
 +import org.apache.http.client.protocol.HttpClientContext;
 +import org.apache.http.impl.auth.BasicScheme;
 +import org.apache.http.impl.client.BasicAuthCache;
 +import org.apache.http.impl.client.BasicCredentialsProvider;
 +import org.apache.http.impl.client.CloseableHttpClient;
 +import org.apache.http.impl.client.HttpClients;
 +import org.apache.log4j.Appender;
 +import org.hamcrest.MatcherAssert;
 +import org.junit.AfterClass;
 +import org.junit.BeforeClass;
 +import org.junit.Test;
 +import org.junit.experimental.categories.Category;
 +import org.slf4j.Logger;
 +import org.slf4j.LoggerFactory;
 +
- import static com.jayway.restassured.RestAssured.given;
++import static io.restassured.RestAssured.given;
 +import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 +import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 +import static org.hamcrest.CoreMatchers.endsWith;
 +import static org.hamcrest.CoreMatchers.equalTo;
 +import static org.hamcrest.CoreMatchers.notNullValue;
 +import static org.hamcrest.core.Is.is;
 +import static org.junit.Assert.assertThat;
 +import static org.xmlmatchers.XmlMatchers.hasXPath;
 +import static org.xmlmatchers.transform.XmlConverters.the;
 +
 +@Category(ReleaseTest.class)
 +public class GatewayMultiFuncTest {
 +
 +  private static Logger LOG = LoggerFactory.getLogger( GatewayMultiFuncTest.class );
 +  private static Class<?> DAT = GatewayMultiFuncTest.class;
 +
 +  private static Enumeration<Appender> appenders;
 +  private static GatewayTestConfig config;
 +  private static DefaultGatewayServices services;
 +  private static GatewayServer gateway;
 +  private static int gatewayPort;
 +  private static String gatewayUrl;
 +  private static TcpTransport ldapTransport;
 +  private static Properties params;
 +  private static TopologyService topos;
 +  private static GatewayTestDriver driver = new GatewayTestDriver();
 +
 +  @BeforeClass
 +  public static void setupSuite() throws Exception {
 +    LOG_ENTER();
 +    //appenders = NoOpAppender.setUp();
 +    driver.setupLdap(0);
 +    setupGateway();
 +    LOG_EXIT();
 +  }
 +
 +  @AfterClass
 +  public static void cleanupSuite() throws Exception {
 +    LOG_ENTER();
 +    gateway.stop();
 +    driver.cleanup();
 +    FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
 +    //NoOpAppender.tearDown( appenders );
 +    LOG_EXIT();
 +  }
 +
 +  public static void setupGateway() throws Exception {
 +
 +    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
 +    File gatewayDir = new File( targetDir, "gateway-home-" + UUID.randomUUID() );
 +    gatewayDir.mkdirs();
 +
 +    config = new GatewayTestConfig();
 +    config.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
 +
 +    URL svcsFileUrl = TestUtils.getResourceUrl( DAT, "services/readme.txt" );
 +    File svcsFile = new File( svcsFileUrl.getFile() );
 +    File svcsDir = svcsFile.getParentFile();
 +    config.setGatewayServicesDir( svcsDir.getAbsolutePath() );
 +
 +    URL appsFileUrl = TestUtils.getResourceUrl( DAT, "applications/readme.txt" );
 +    File appsFile = new File( appsFileUrl.getFile() );
 +    File appsDir = appsFile.getParentFile();
 +    config.setGatewayApplicationsDir( appsDir.getAbsolutePath() );
 +
 +    File topoDir = new File( config.getGatewayTopologyDir() );
 +    topoDir.mkdirs();
 +
 +    File deployDir = new File( config.getGatewayDeploymentDir() );
 +    deployDir.mkdirs();
 +
 +    startGatewayServer();
 +  }
 +
 +  public static void startGatewayServer() throws Exception {
 +    services = new DefaultGatewayServices();
 +    Map<String,String> options = new HashMap<>();
 +    options.put( "persist-master", "false" );
 +    options.put( "master", "password" );
 +    try {
 +      services.init( config, options );
 +    } catch ( ServiceLifecycleException e ) {
 +      e.printStackTrace(); // I18N not required.
 +    }
 +    topos = services.getService(GatewayServices.TOPOLOGY_SERVICE);
 +
 +    gateway = GatewayServer.startGateway( config, services );
 +    MatcherAssert.assertThat( "Failed to start gateway.", gateway, notNullValue() );
 +
 +    gatewayPort = gateway.getAddresses()[0].getPort();
 +    gatewayUrl = "http://localhost:" + gatewayPort + "/" + config.getGatewayPath();
 +
 +    LOG.info( "Gateway port = " + gateway.getAddresses()[ 0 ].getPort() );
 +
 +    params = new Properties();
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testDefaultJsonMimeTypeHandlingKnox678() throws Exception {
 +    LOG_ENTER();
 +
 +    MockServer mock = new MockServer( "REPEAT", true );
 +
 +    params = new Properties();
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +    params.put( "MOCK_SERVER_PORT", mock.getPort() );
 +
 +    String topoStr = TestUtils.merge( DAT, "topologies/test-knox678-utf8-chars-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "knox678.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    String uname = "guest";
 +    String pword = uname + "-password";
 +
 +    mock.expect().method( "GET" )
 +        .respond().contentType( "application/json" ).contentLength( -1 ).content( "{\"msg\":\"H\u00eallo\"}", Charset.forName( "UTF-8" ) );
 +    String json = given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/json; charset=UTF-8" )
-         .when().log().ifError().get( gatewayUrl + "/knox678/repeat" ).andReturn().asString();
++        .when().get( gatewayUrl + "/knox678/repeat" ).andReturn().asString();
 +    assertThat( json, is("{\"msg\":\"H\u00eallo\"}") );
 +    assertThat( mock.isEmpty(), is(true) );
 +
 +    mock.expect().method( "GET" )
 +        .respond().contentType( "application/octet-stream" ).contentLength( -1 ).content( "H\u00eallo".getBytes() );
 +    byte[] bytes = given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "application/octet-stream" )
-         .when().log().ifError().get( gatewayUrl + "/knox678/repeat" ).andReturn().asByteArray();
++        .when().get( gatewayUrl + "/knox678/repeat" ).andReturn().asByteArray();
 +    assertThat( bytes, is(equalTo("H\u00eallo".getBytes())) );
 +    assertThat( mock.isEmpty(), is(true) );
 +
 +    mock.stop();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testPostWithContentTypeKnox681() throws Exception {
 +    LOG_ENTER();
 +
 +    MockServer mock = new MockServer( "REPEAT", true );
 +
 +    params = new Properties();
 +    params.put( "MOCK_SERVER_PORT", mock.getPort() );
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +
 +    String topoStr = TestUtils.merge( DAT, "topologies/test-knox678-utf8-chars-topology.xml", params );
 +    File topoFile = new File( config.getGatewayTopologyDir(), "knox681.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +
 +    topos.reloadTopologies();
 +
 +    mock
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/repeat-context/" )
 +        .respond()
 +        .status( HttpStatus.SC_CREATED )
 +        .content( "{\"name\":\"value\"}".getBytes() )
 +        .contentLength( -1 )
 +        .contentType( "application/json; charset=UTF-8" )
 +        .header( "Location", gatewayUrl + "/knox681/repeat" );
 +
 +    String uname = "guest";
 +    String pword = uname + "-password";
 +
 +    HttpHost targetHost = new HttpHost( "localhost", gatewayPort, "http" );
 +    CredentialsProvider credsProvider = new BasicCredentialsProvider();
 +    credsProvider.setCredentials(
 +        new AuthScope( targetHost.getHostName(), targetHost.getPort() ),
 +        new UsernamePasswordCredentials( uname, pword ) );
 +
 +    AuthCache authCache = new BasicAuthCache();
 +    BasicScheme basicAuth = new BasicScheme();
 +    authCache.put( targetHost, basicAuth );
 +
 +    HttpClientContext context = HttpClientContext.create();
 +    context.setCredentialsProvider( credsProvider );
 +    context.setAuthCache( authCache );
 +
 +    CloseableHttpClient client = HttpClients.createDefault();
 +    HttpPut request = new HttpPut( gatewayUrl + "/knox681/repeat" );
 +    request.addHeader( "X-XSRF-Header", "jksdhfkhdsf" );
 +    request.addHeader( "Content-Type", "application/json" );
 +    CloseableHttpResponse response = client.execute( request, context );
 +    assertThat( response.getStatusLine().getStatusCode(), is( HttpStatus.SC_CREATED ) );
 +    assertThat( response.getFirstHeader( "Location" ).getValue(), endsWith("/gateway/knox681/repeat" ) );
 +    assertThat( response.getFirstHeader( "Content-Type" ).getValue(), is("application/json; charset=UTF-8") );
 +    String body = new String( IOUtils.toByteArray( response.getEntity().getContent() ), Charset.forName( "UTF-8" ) );
 +    assertThat( body, is( "{\"name\":\"value\"}" ) );
 +    response.close();
 +    client.close();
 +
 +    mock
 +        .expect()
 +        .method( "PUT" )
 +        .pathInfo( "/repeat-context/" )
 +        .respond()
 +        .status( HttpStatus.SC_CREATED )
 +        .content( "<test-xml/>".getBytes() )
 +        .contentType( "application/xml; charset=UTF-8" )
 +        .header( "Location", gatewayUrl + "/knox681/repeat" );
 +
 +    client = HttpClients.createDefault();
 +    request = new HttpPut( gatewayUrl + "/knox681/repeat" );
 +    request.addHeader( "X-XSRF-Header", "jksdhfkhdsf" );
 +    request.addHeader( "Content-Type", "application/xml" );
 +    response = client.execute( request, context );
 +    assertThat( response.getStatusLine().getStatusCode(), is( HttpStatus.SC_CREATED ) );
 +    assertThat( response.getFirstHeader( "Location" ).getValue(), endsWith("/gateway/knox681/repeat" ) );
 +    assertThat( response.getFirstHeader( "Content-Type" ).getValue(), is("application/xml; charset=UTF-8") );
 +    body = new String( IOUtils.toByteArray( response.getEntity().getContent() ), Charset.forName( "UTF-8" ) );
 +    assertThat( the(body), hasXPath( "/test-xml" ) );
 +    response.close();
 +    client.close();
 +
 +    mock.stop();
 +
 +    LOG_EXIT();
 +  }
 +
 +  @Test( timeout = TestUtils.MEDIUM_TIMEOUT )
 +  public void testLdapSearchConfigEnhancementsKnox694() throws Exception {
 +    LOG_ENTER();
 +
 +    String topoStr;
 +    File topoFile;
 +
 +    String adminUName = "uid=admin,ou=people,dc=hadoop,dc=apache,dc=org";
 +    String adminPWord = "admin-password";
 +    String uname = "people\\guest";
 +    String pword = "guest-password";
 +    String invalidPword = "invalid-guest-password";
 +
 +    params = new Properties();
 +    params.put( "LDAP_URL", driver.getLdapUrl() );
 +    params.put( "LDAP_SYSTEM_USERNAME", adminUName );
 +    params.put( "LDAP_SYSTEM_PASSWORD", adminPWord );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-user-dn-template.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-1.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
-         .when().log().ifError().get( gatewayUrl + "/knox694-1/test-service-path/test-resource-path" );
++        .when().get( gatewayUrl + "/knox694-1/test-service-path/test-resource-path" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, invalidPword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-1/test-service-path/test-resource-path" );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-search-attribute.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-2.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
-         .when().log().ifError().get( gatewayUrl + "/knox694-2/test-service-path/test-resource-path" );
++        .when().get( gatewayUrl + "/knox694-2/test-service-path/test-resource-path" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, invalidPword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-2/test-service-path/test-resource-path" );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-search-filter.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-3.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
-         .when().log().ifError().get( gatewayUrl + "/knox694-3/test-service-path/test-resource-path" );
++        .when().get( gatewayUrl + "/knox694-3/test-service-path/test-resource-path" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, invalidPword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-3/test-service-path/test-resource-path" );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-search-scope-object.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-4.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
-         .when().log().ifError().get( gatewayUrl + "/knox694-4/test-service-path/test-resource-path" );
++        .when().get( gatewayUrl + "/knox694-4/test-service-path/test-resource-path" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, invalidPword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-4/test-service-path/test-resource-path" );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-search-scope-onelevel-positive.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-5.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_OK )
 +        .contentType( "text/plain" )
 +        .body( is( "test-service-response" ) )
-         .when().log().ifError().get( gatewayUrl + "/knox694-5/test-service-path/test-resource-path" );
++        .when().get( gatewayUrl + "/knox694-5/test-service-path/test-resource-path" );
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, invalidPword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-5/test-service-path/test-resource-path" );
 +
 +    topoStr = TestUtils.merge( DAT, "topologies/test-knox694-principal-regex-search-scope-onelevel-negative.xml", params );
 +    topoFile = new File( config.getGatewayTopologyDir(), "knox694-6.xml" );
 +    FileUtils.writeStringToFile( topoFile, topoStr );
 +    topos.reloadTopologies();
 +
 +    given()
 +        //.log().all()
 +        .auth().preemptive().basic( uname, pword )
 +        .expect()
 +        //.log().all()
 +        .statusCode( HttpStatus.SC_UNAUTHORIZED )
 +        .when().get( gatewayUrl + "/knox694-6/test-service-path/test-resource-path" );
 +
 +    LOG_EXIT();
 +  }
 +
 +}
 +
 +


[04/11] knox git commit: KNOX-1037 - Upgrade rest-assured dependency

Posted by mo...@apache.org.
KNOX-1037 - Upgrade rest-assured dependency


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/e2e12514
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/e2e12514
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/e2e12514

Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: e2e1251466c26ea70c145f1b456eaa42d2002f24
Parents: 751f764
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Sep 13 12:01:31 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Sep 13 14:20:21 2017 +0100

----------------------------------------------------------------------
 gateway-server/pom.xml                          | 22 -----
 gateway-test/pom.xml                            | 12 +--
 .../gateway/AmbariServiceDefinitionTest.java    |  2 +-
 .../hadoop/gateway/GatewayAdminFuncTest.java    |  2 +-
 .../gateway/GatewayAdminTopologyFuncTest.java   | 43 ++++-----
 .../hadoop/gateway/GatewayAppFuncTest.java      |  2 +-
 .../hadoop/gateway/GatewayBasicFuncTest.java    | 94 +++++++++++---------
 .../hadoop/gateway/GatewayDeployFuncTest.java   |  4 +-
 .../hadoop/gateway/GatewayHealthFuncTest.java   |  4 +-
 .../GatewayLdapDynamicGroupFuncTest.java        |  2 +-
 .../gateway/GatewayLdapGroupFuncTest.java       |  4 +-
 .../gateway/GatewayLdapPosixGroupFuncTest.java  |  2 +-
 .../gateway/GatewayLocalServiceFuncTest.java    |  2 +-
 .../hadoop/gateway/GatewayMultiFuncTest.java    | 16 ++--
 .../GatewayPortMappingDisableFeatureTest.java   |  2 +-
 .../gateway/GatewayPortMappingFailTest.java     |  2 +-
 .../gateway/GatewayPortMappingFuncTest.java     |  2 +-
 .../hadoop/gateway/GatewaySampleFuncTest.java   |  2 +-
 .../apache/hadoop/gateway/Knox242FuncTest.java  |  2 +-
 .../hadoop/gateway/WebHdfsHaFuncTest.java       |  2 +-
 pom.xml                                         | 14 +--
 21 files changed, 102 insertions(+), 135 deletions(-)
----------------------------------------------------------------------
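
[Editorial note: the migration in the hunks below is largely mechanical: the Maven coordinates move from com.jayway.restassured:rest-assured:1.8.1 to io.rest-assured:rest-assured:3.0.3, the imports move from com.jayway.restassured.* to io.restassured.* (with Cookie and Header now under io.restassured.http), request bodies are sent with .body(...) rather than .content(...), and HTTP verbs are chained after an explicit .when(). A minimal sketch of a call in the new style follows; the URL and credentials are placeholders, not values from the Knox tests.]

  import static io.restassured.RestAssured.given;
  import org.apache.http.HttpStatus;

  // Placeholder request; the shape matches the migrated Knox tests.
  given()
      .auth().preemptive().basic( "guest", "guest-password" )
      .header( "X-XSRF-Header", "test" )
      .expect()
      .statusCode( HttpStatus.SC_OK )
      .contentType( "application/json" )
      .when().get( "http://localhost:8443/gateway/sandbox/webhdfs/v1/tmp?op=LISTSTATUS" );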


http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-server/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-server/pom.xml b/gateway-server/pom.xml
index 6200bab..437d22d 100644
--- a/gateway-server/pom.xml
+++ b/gateway-server/pom.xml
@@ -271,28 +271,6 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>com.jayway.restassured</groupId>
-            <artifactId>rest-assured</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path-assert</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.xmlmatchers</groupId>
-            <artifactId>xml-matchers</artifactId>
-            <scope>test</scope>
-        </dependency>
-
         <!-- This must be after restassured otherwise it messes up the hamcrest dependencies. -->
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-test/pom.xml b/gateway-test/pom.xml
index d3ad73b..24e894b 100644
--- a/gateway-test/pom.xml
+++ b/gateway-test/pom.xml
@@ -85,20 +85,10 @@
 
 
         <dependency>
-            <groupId>com.jayway.restassured</groupId>
+            <groupId>io.rest-assured</groupId>
             <artifactId>rest-assured</artifactId>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.jayway.jsonpath</groupId>
-            <artifactId>json-path-assert</artifactId>
-            <scope>test</scope>
-        </dependency>
 
         <dependency>
             <groupId>org.xmlmatchers</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
index e1b6d2b..2d02966 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/AmbariServiceDefinitionTest.java
@@ -47,7 +47,7 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.notNullValue;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
index c19d2ef..ca36248 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminFuncTest.java
@@ -39,7 +39,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.hamcrest.CoreMatchers.notNullValue;
 
 public class GatewayAdminFuncTest {

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
index ec02b1f..3a5cd84 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAdminTopologyFuncTest.java
@@ -29,7 +29,7 @@ import java.util.Map;
 import java.util.UUID;
 import javax.ws.rs.core.MediaType;
 
-import com.jayway.restassured.http.ContentType;
+import io.restassured.http.ContentType;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.hadoop.gateway.config.GatewayConfig;
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
 import org.w3c.dom.Document;
 import org.xml.sax.InputSource;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.containsString;
@@ -277,10 +277,11 @@ public class GatewayAdminTopologyFuncTest {
         //.log().all()
         .statusCode(HttpStatus.SC_OK)
         .contentType(MediaType.APPLICATION_XML)
-        .get(serviceUrl);
+        .when().get(serviceUrl);
 
 
     given().auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
         .expect()
         //.log().all()
         .statusCode(HttpStatus.SC_OK)
@@ -364,6 +365,7 @@ public class GatewayAdminTopologyFuncTest {
     given()
         //.log().all()
         .auth().preemptive().basic(adminUser, adminPass)
+        .header("Accept", MediaType.APPLICATION_JSON)
         .expect()
         //.log().all()
         .statusCode(HttpStatus.SC_OK)
@@ -376,7 +378,7 @@ public class GatewayAdminTopologyFuncTest {
         .body("topologies.topology[1].href", not(nullValue()))
         .body("topologies.topology[0].timestamp", not(nullValue()))
         .body("topologies.topology[1].timestamp", not(nullValue()))
-        .get(url);
+        .when().get(url);
 
     LOG_EXIT();
   }
@@ -395,7 +397,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         //.log().all()
         .statusCode(HttpStatus.SC_FORBIDDEN)
-        .get(url);
+        .when().get(url);
 
     LOG_EXIT();
   }
@@ -534,11 +536,12 @@ public class GatewayAdminTopologyFuncTest {
 
     given()
         .auth().preemptive().basic(username, password)
+        .header("Accept", MediaType.APPLICATION_JSON)
         .expect()
         //.log().all()
         .statusCode(HttpStatus.SC_OK)
         .contentType(MediaType.APPLICATION_JSON)
-        .get(url);
+        .when().get(url);
 
     given()
         .auth().preemptive().basic(username, password)
@@ -546,7 +549,7 @@ public class GatewayAdminTopologyFuncTest {
         //.log().all()
         .statusCode(HttpStatus.SC_OK)
         .contentType(MediaType.APPLICATION_JSON)
-        .delete(url);
+        .when().delete(url);
 
     given()
         //.log().all()
@@ -554,7 +557,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         //.log().all()
         .statusCode(HttpStatus.SC_NO_CONTENT)
-        .get(url);
+        .when().get(url);
 
     LOG_EXIT();
   }
@@ -583,7 +586,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         .statusCode(HttpStatus.SC_OK)
         //.log().all()
-        .put(url).getBody().asString();
+        .when().put(url).getBody().asString();
 
     InputSource source = new InputSource( new StringReader( XML ) );
     Document doc = XmlUtils.readXml( source );
@@ -597,7 +600,7 @@ public class GatewayAdminTopologyFuncTest {
             .expect()
             .statusCode(HttpStatus.SC_OK)
             .body(equalTo(XML))
-            .get(url)
+            .when().get(url)
             .getBody().asString();
 
     String XmlPut =
@@ -616,7 +619,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         .statusCode(HttpStatus.SC_OK)
             //.log().all()
-        .put(url).getBody().asString();
+        .when().put(url).getBody().asString();
 
     given()
         .auth().preemptive().basic(username, password)
@@ -624,7 +627,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         .statusCode(HttpStatus.SC_OK)
         .body(equalTo(JSON))
-        .get(url)
+        .when().get(url)
         .getBody().asString();
 
     LOG_EXIT();
@@ -661,7 +664,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(newUrl))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
 
 //     Case 2: add in x-forward headers (host, server, proto, context)
@@ -680,7 +683,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(host))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
 //     Case 3: add in x-forward headers (host, proto, port, context)
     given()
@@ -698,7 +701,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(proto))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
 //     Case 4: add in x-forward headers (host, proto, port, context) no port in host.
     given()
@@ -716,7 +719,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(proto))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
 //     Case 5: add in x-forward headers (host, port)
     given()
@@ -730,7 +733,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(port))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
 //     Case 6: Normal Request
     given()
@@ -741,7 +744,7 @@ public class GatewayAdminTopologyFuncTest {
         .body(containsString(url))
         .body(containsString("test-cluster"))
         .body(containsString("admin"))
-        .get(url);
+        .when().get(url);
 
     LOG_EXIT();
   }
@@ -760,7 +763,7 @@ public class GatewayAdminTopologyFuncTest {
         .expect()
         .statusCode(HttpStatus.SC_OK)
         .body(containsString(url + "/test-cluster"))
-        .get(url);
+        .when().get(url);
 
 
 //     Case 2: Change gateway.path to another String. Ensure HTTP OK resp + valid URL.
@@ -779,7 +782,7 @@ public class GatewayAdminTopologyFuncTest {
          .expect()
          .statusCode(HttpStatus.SC_OK)
          .body(containsString(newUrl + "/test-cluster"))
-         .get(newUrl);
+         .when().get(newUrl);
    } catch(Exception e){
      fail(e.getMessage());
    }

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
index cd7a9fe..ed44d15 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayAppFuncTest.java
@@ -46,7 +46,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.equalTo;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
index 576721d..cbe12a8 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayBasicFuncTest.java
@@ -35,12 +35,13 @@ import java.util.Map;
 import java.util.Map.Entry;
 import javax.ws.rs.core.MediaType;
 
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.path.json.JsonPath;
-import com.jayway.restassured.response.Cookie;
-import com.jayway.restassured.response.Header;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.specification.ResponseSpecification;
+import io.restassured.RestAssured;
+import io.restassured.http.ContentType;
+import io.restassured.http.Cookie;
+import io.restassured.http.Header;
+import io.restassured.path.json.JsonPath;
+import io.restassured.response.Response;
+import io.restassured.specification.ResponseSpecification;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.filefilter.WildcardFileFilter;
@@ -85,7 +86,7 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.*;
@@ -312,12 +313,11 @@ public class GatewayBasicFuncTest {
         .expect()
         //.log().all()
         .statusCode( HttpStatus.SC_OK )
-        .header( "Set-Cookie", containsString( "JSESSIONID" ) )
-        .header( "Set-Cookie", containsString( "HttpOnly" ) )
         .contentType( "application/json" )
         .content( "boolean", is( true ) )
         .when().put( driver.getUrl( "WEBHDFS" ) + "/v1" + root + "/dir" ).getDetailedCookie( "JSESSIONID" );
     assertThat( cookie.isSecured(), is( true ) );
+    assertThat( cookie.isHttpOnly(), is( true ) );
     assertThat( cookie.getPath(), is( "/gateway/cluster" ) );
     assertThat( cookie.getValue().length(), greaterThan( 16 ) );
     driver.assertComplete();
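
[Editorial note: the hunk above switches from substring-matching the Set-Cookie header to asserting the HttpOnly flag directly on the detailed cookie object. A minimal sketch of that pattern follows; the URL and credentials are placeholders.]

  import static io.restassured.RestAssured.given;
  import static org.hamcrest.CoreMatchers.is;
  import static org.hamcrest.MatcherAssert.assertThat;
  import io.restassured.http.Cookie;

  // Fetch the session cookie and check its flags via the detailed cookie API.
  Cookie cookie = given()
      .auth().preemptive().basic( "hdfs", "hdfs-password" )
      .header( "X-XSRF-Header", "test" )
      .when().put( "http://localhost:8443/gateway/cluster/webhdfs/v1/tmp/dir?op=MKDIRS" )
      .getDetailedCookie( "JSESSIONID" );
  assertThat( cookie.isSecured(), is( true ) );
  assertThat( cookie.isHttpOnly(), is( true ) );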
@@ -427,40 +427,47 @@ public class GatewayBasicFuncTest {
         .queryParam( "recursive", "true" )
         .respond()
         .status( HttpStatus.SC_OK );
-    given()
-        //.log().all()
-        .auth().preemptive().basic( username, password )
-        .header("X-XSRF-Header", "jksdhfkhdsf")
-        .queryParam( "op", "DELETE" )
-        .queryParam( "recursive", "true" )
-        .expect()
-        //.log().all()
-        .statusCode( HttpStatus.SC_OK )
-        .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
-    driver.assertComplete();
 
-    driver.getMock( "WEBHDFS" )
-        .expect()
-        .method( "PUT" )
-        .pathInfo( "/v1/user/hdfs/dir" )
-        .queryParam( "op", "MKDIRS" )
-        .queryParam( "user.name", username )
-        .respond()
-        .status( HttpStatus.SC_OK )
-        .content( driver.getResourceBytes( "webhdfs-success.json" ) )
-        .contentType("application/json");
-    given()
-        //.log().all()
-        .auth().preemptive().basic( username, password )
-        .header("X-XSRF-Header", "jksdhfkhdsf")
-        .queryParam( "op", "MKDIRS" )
-        .expect()
-        //.log().all();
-        .statusCode( HttpStatus.SC_OK )
-        .contentType( "application/json" )
-        .content( "boolean", is( true ) )
-        .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
-    driver.assertComplete();
+    try {
+      // Need to turn off URL encoding here, otherwise the tilde gets encoded and the rewrite rules fail
+      RestAssured.urlEncodingEnabled = false;
+      given()
+          //.log().all()
+          .auth().preemptive().basic( username, password )
+          .header("X-XSRF-Header", "jksdhfkhdsf")
+          .queryParam( "op", "DELETE" )
+          .queryParam( "recursive", "true" )
+          .expect()
+          //.log().all()
+          .statusCode( HttpStatus.SC_OK )
+          .when().delete( driver.getUrl( "WEBHDFS" ) + "/v1/~" + root + ( driver.isUseGateway() ? "" : "?user.name=" + username ) );
+      driver.assertComplete();
+
+      driver.getMock( "WEBHDFS" )
+          .expect()
+          .method( "PUT" )
+          .pathInfo( "/v1/user/hdfs/dir" )
+          .queryParam( "op", "MKDIRS" )
+          .queryParam( "user.name", username )
+          .respond()
+          .status( HttpStatus.SC_OK )
+          .content( driver.getResourceBytes( "webhdfs-success.json" ) )
+          .contentType("application/json");
+      given()
+          //.log().all()
+          .auth().preemptive().basic( username, password )
+          .header("X-XSRF-Header", "jksdhfkhdsf")
+          .queryParam( "op", "MKDIRS" )
+          .expect()
+          //.log().all();
+          .statusCode( HttpStatus.SC_OK )
+          .contentType( "application/json" )
+          .content( "boolean", is( true ) )
+          .when().put( driver.getUrl( "WEBHDFS" ) + "/v1/~/dir" );
+      driver.assertComplete();
+    } finally {
+      RestAssured.urlEncodingEnabled = true;
+    }
     LOG_EXIT();
   }
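
[Editorial note: RestAssured.urlEncodingEnabled is a global static flag, so the hunk above wraps the tilde-path requests in try/finally to restore the default for later tests. A stripped-down sketch of that pattern follows; the URL and credentials are placeholders.]

  import io.restassured.RestAssured;
  import static io.restassured.RestAssured.given;
  import org.apache.http.HttpStatus;

  try {
    // Keep the literal '~' in the path so the gateway rewrite rules can match it.
    RestAssured.urlEncodingEnabled = false;
    given()
        .auth().preemptive().basic( "hdfs", "hdfs-password" )
        .header( "X-XSRF-Header", "test" )
        .queryParam( "op", "DELETE" )
        .queryParam( "recursive", "true" )
        .expect()
        .statusCode( HttpStatus.SC_OK )
        .when().delete( "http://localhost:8443/gateway/cluster/webhdfs/v1/~/dir" );
  } finally {
    // Restore the global default so other tests are unaffected.
    RestAssured.urlEncodingEnabled = true;
  }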
 
@@ -2015,7 +2022,6 @@ public class GatewayBasicFuncTest {
     .expect()
     .method("PUT")
     .pathInfo(scannerPath)
-    .header("Content-Type", ContentType.XML.toString())
     .respond()
     .status(HttpStatus.SC_CREATED);
 
@@ -2023,7 +2029,7 @@ public class GatewayBasicFuncTest {
     .auth().preemptive().basic( username, password )
     .header("X-XSRF-Header", "jksdhfkhdsf")
     .header( "Content-Type", ContentType.XML.toString() )
-    .content( driver.getResourceBytes( scannerDefinitionResourceName + ".xml" ) )
+    .body( driver.getResourceBytes( scannerDefinitionResourceName + ".xml" ) )
     .expect()
     //TODO: Add "Location" header check when the issue with incorrect outbound rewrites is resolved
     //.header( "Location", startsWith( driver.getUrl( "WEBHBASE" ) + createScannerPath ) )

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
index c1c0ffa..744fd85 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayDeployFuncTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
-import com.jayway.restassured.response.Response;
+import io.restassured.response.Response;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.commons.io.FileUtils;
@@ -48,7 +48,7 @@ import java.util.Map;
 import java.util.UUID;
 import java.util.regex.Pattern;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.containsString;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayHealthFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayHealthFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayHealthFuncTest.java
index 159b952..84d43b8 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayHealthFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayHealthFuncTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
-import com.jayway.restassured.path.json.JsonPath;
+import io.restassured.path.json.JsonPath;
 import com.mycila.xmltool.XMLDoc;
 import com.mycila.xmltool.XMLTag;
 import org.apache.directory.server.protocol.shared.transport.TcpTransport;
@@ -46,7 +46,7 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.hamcrest.CoreMatchers.notNullValue;
 
 public class GatewayHealthFuncTest {

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
index 2b5f0df..5567321 100755
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapDynamicGroupFuncTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
index 66cb311..68b9b5e 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapGroupFuncTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;
@@ -150,7 +150,7 @@ public class GatewayLdapGroupFuncTest {
     AliasService aliasService = (AliasService)services.getService(GatewayServices.ALIAS_SERVICE);
     aliasService.addAliasForCluster("test-cluster", "ldcSystemPassword", "guest-password");
 
-    char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
+    // char[] password1 = aliasService.getPasswordFromAliasForCluster( "test-cluster", "ldcSystemPassword");
     //System.err.println("SETUP password 10: " + ((password1 == null) ? "NULL" : new String(password1)));
 
     File descriptor = new File( topoDir, "test-cluster.xml" );

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
index 6ca0c15..42674e8 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLdapPosixGroupFuncTest.java
@@ -47,7 +47,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
index 613eea7..dff3182 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayLocalServiceFuncTest.java
@@ -41,7 +41,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayMultiFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayMultiFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayMultiFuncTest.java
index 67852b9..2c2371f 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayMultiFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayMultiFuncTest.java
@@ -59,7 +59,7 @@ import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.endsWith;
@@ -186,7 +186,7 @@ public class GatewayMultiFuncTest {
         //.log().all()
         .statusCode( HttpStatus.SC_OK )
         .contentType( "application/json; charset=UTF-8" )
-        .when().log().ifError().get( gatewayUrl + "/knox678/repeat" ).andReturn().asString();
+        .when().get( gatewayUrl + "/knox678/repeat" ).andReturn().asString();
     assertThat( json, is("{\"msg\":\"H\u00eallo\"}") );
     assertThat( mock.isEmpty(), is(true) );
 
@@ -199,7 +199,7 @@ public class GatewayMultiFuncTest {
         //.log().all()
         .statusCode( HttpStatus.SC_OK )
         .contentType( "application/octet-stream" )
-        .when().log().ifError().get( gatewayUrl + "/knox678/repeat" ).andReturn().asByteArray();
+        .when().get( gatewayUrl + "/knox678/repeat" ).andReturn().asByteArray();
     assertThat( bytes, is(equalTo("H\u00eallo".getBytes())) );
     assertThat( mock.isEmpty(), is(true) );
 
@@ -324,7 +324,7 @@ public class GatewayMultiFuncTest {
         .statusCode( HttpStatus.SC_OK )
         .contentType( "text/plain" )
         .body( is( "test-service-response" ) )
-        .when().log().ifError().get( gatewayUrl + "/knox694-1/test-service-path/test-resource-path" );
+        .when().get( gatewayUrl + "/knox694-1/test-service-path/test-resource-path" );
     given()
         //.log().all()
         .auth().preemptive().basic( uname, invalidPword )
@@ -346,7 +346,7 @@ public class GatewayMultiFuncTest {
         .statusCode( HttpStatus.SC_OK )
         .contentType( "text/plain" )
         .body( is( "test-service-response" ) )
-        .when().log().ifError().get( gatewayUrl + "/knox694-2/test-service-path/test-resource-path" );
+        .when().get( gatewayUrl + "/knox694-2/test-service-path/test-resource-path" );
     given()
         //.log().all()
         .auth().preemptive().basic( uname, invalidPword )
@@ -368,7 +368,7 @@ public class GatewayMultiFuncTest {
         .statusCode( HttpStatus.SC_OK )
         .contentType( "text/plain" )
         .body( is( "test-service-response" ) )
-        .when().log().ifError().get( gatewayUrl + "/knox694-3/test-service-path/test-resource-path" );
+        .when().get( gatewayUrl + "/knox694-3/test-service-path/test-resource-path" );
     given()
         //.log().all()
         .auth().preemptive().basic( uname, invalidPword )
@@ -390,7 +390,7 @@ public class GatewayMultiFuncTest {
         .statusCode( HttpStatus.SC_OK )
         .contentType( "text/plain" )
         .body( is( "test-service-response" ) )
-        .when().log().ifError().get( gatewayUrl + "/knox694-4/test-service-path/test-resource-path" );
+        .when().get( gatewayUrl + "/knox694-4/test-service-path/test-resource-path" );
     given()
         //.log().all()
         .auth().preemptive().basic( uname, invalidPword )
@@ -412,7 +412,7 @@ public class GatewayMultiFuncTest {
         .statusCode( HttpStatus.SC_OK )
         .contentType( "text/plain" )
         .body( is( "test-service-response" ) )
-        .when().log().ifError().get( gatewayUrl + "/knox694-5/test-service-path/test-resource-path" );
+        .when().get( gatewayUrl + "/knox694-5/test-service-path/test-resource-path" );
     given()
         //.log().all()
         .auth().preemptive().basic( uname, invalidPword )

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingDisableFeatureTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingDisableFeatureTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingDisableFeatureTest.java
index 094d984..f5b6f78 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingDisableFeatureTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingDisableFeatureTest.java
@@ -35,7 +35,7 @@ import java.io.IOException;
 import java.net.ConnectException;
 import java.util.concurrent.ConcurrentHashMap;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFailTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFailTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFailTest.java
index a8ed307..c4e7794 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFailTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFailTest.java
@@ -30,7 +30,7 @@ import org.junit.experimental.categories.Category;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFuncTest.java
index 0f7cc68..a8199d1 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayPortMappingFuncTest.java
@@ -32,7 +32,7 @@ import org.junit.experimental.categories.Category;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySampleFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySampleFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySampleFuncTest.java
index 5e71be2..9e66887 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySampleFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewaySampleFuncTest.java
@@ -40,7 +40,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
index 4745640..7026407 100755
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/Knox242FuncTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
----------------------------------------------------------------------
diff --git a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
index 32aa11d..2f4574f 100644
--- a/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
+++ b/gateway-test/src/test/java/org/apache/hadoop/gateway/WebHdfsHaFuncTest.java
@@ -31,7 +31,7 @@ import org.junit.experimental.categories.Category;
 
 import java.io.IOException;
 
-import static com.jayway.restassured.RestAssured.given;
+import static io.restassured.RestAssured.given;
 import static org.apache.hadoop.test.TestUtils.LOG_ENTER;
 import static org.apache.hadoop.test.TestUtils.LOG_EXIT;
 import static org.hamcrest.CoreMatchers.is;

http://git-wip-us.apache.org/repos/asf/knox/blob/e2e12514/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 71abc92..d2f4176 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1253,20 +1253,10 @@
 
             <!-- rest-assured must be before JUnit otherwise it messes up the hamcrest dependencies -->
             <dependency>
-                <groupId>com.jayway.restassured</groupId>
+                <groupId>io.rest-assured</groupId>
                 <artifactId>rest-assured</artifactId>
-                <version>1.8.1</version>
+                <version>3.0.3</version>
                 <scope>test</scope>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.codehaus.jackson</groupId>
-                        <artifactId>jackson-core-asl</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.codehaus.jackson</groupId>
-                        <artifactId>jackson-mapper-asl</artifactId>
-                    </exclusion>
-                </exclusions>
             </dependency>
 
             <dependency>