You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by mo...@apache.org on 2017/10/16 17:05:55 UTC
[01/23] knox git commit: KNOX-1071 - Replace java.util.Random with
SecureRandom
Repository: knox
Updated Branches:
refs/heads/KNOX-998-Package_Restructuring cdac6ff07 -> 7d0bff16e
KNOX-1071 - Replace java.util.Random with SecureRandom
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/145ed5dd
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/145ed5dd
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/145ed5dd
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 145ed5dd673e89db278ad5b7257690e3b23e3109
Parents: 10b3473
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Thu Sep 28 12:39:00 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Thu Sep 28 12:39:00 2017 +0100
----------------------------------------------------------------------
.../impl/DefaultServiceRegistryService.java | 50 ++++++++++----------
.../security/impl/DefaultAliasService.java | 12 ++---
2 files changed, 31 insertions(+), 31 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/145ed5dd/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
index c590f0d..ec08597 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/registry/impl/DefaultServiceRegistryService.java
@@ -35,14 +35,14 @@ import org.apache.hadoop.gateway.services.security.CryptoService;
import java.io.File;
import java.io.IOException;
+import java.security.SecureRandom;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Random;
public class DefaultServiceRegistryService implements ServiceRegistry, Service {
private static GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
-
+
protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
@@ -53,31 +53,31 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
private Registry registry = new Registry();
private String registryFileName;
-
+
public DefaultServiceRegistryService() {
}
-
+
public void setCryptoService(CryptoService crypto) {
this.crypto = crypto;
}
-
+
public String getRegistrationCode(String clusterName) {
String code = generateRegCode(16);
byte[] signature = crypto.sign("SHA256withRSA","gateway-identity",code);
String encodedSig = Base64.encodeBase64URLSafeString(signature);
-
+
return code + "::" + encodedSig;
}
-
+
private String generateRegCode(int length) {
- StringBuffer sb = new StringBuffer();
- Random r = new Random();
+ StringBuilder sb = new StringBuilder();
+ SecureRandom r = new SecureRandom();
for (int i = 0; i < length; i++) {
sb.append(chars[r.nextInt(chars.length)]);
}
return sb.toString();
}
-
+
public void removeClusterServices(String clusterName) {
registry.remove(clusterName);
}
@@ -89,7 +89,7 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
throw new IllegalArgumentException("Registration Code must not be null.");
}
String[] parts = regCode.split("::");
-
+
// part one is the code and part two is the signature
boolean verified = crypto.verify("SHA256withRSA", "gateway-identity", parts[0], Base64.decodeBase64(parts[1]));
if (verified) {
@@ -114,24 +114,24 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
e.printStackTrace(); //TODO: I18N
}
}
-
+
return rc;
}
-
+
private String renderAsJsonString(HashMap<String,HashMap<String,RegEntry>> registry) {
String json = null;
ObjectMapper mapper = new ObjectMapper();
-
+
try {
// write JSON to a file
json = mapper.writeValueAsString((Object)registry);
-
+
} catch ( JsonProcessingException e ) {
e.printStackTrace(); //TODO: I18N
}
return json;
}
-
+
@Override
public String lookupServiceURL(String clusterName, String serviceName) {
List<String> urls = lookupServiceURLs( clusterName, serviceName );
@@ -144,22 +144,22 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
@Override
public List<String> lookupServiceURLs( String clusterName, String serviceName ) {
RegEntry entry = null;
- HashMap clusterServices = registry.get(clusterName);
+ HashMap<String, RegEntry> clusterServices = registry.get(clusterName);
if (clusterServices != null) {
- entry = (RegEntry) clusterServices.get(serviceName);
+ entry = clusterServices.get(serviceName);
if( entry != null ) {
return entry.getUrls();
}
}
return null;
}
-
+
private HashMap<String, HashMap<String,RegEntry>> getMapFromJsonString(String json) {
Registry map = null;
- JsonFactory factory = new JsonFactory();
- ObjectMapper mapper = new ObjectMapper(factory);
- TypeReference<Registry> typeRef
- = new TypeReference<Registry>() {};
+ JsonFactory factory = new JsonFactory();
+ ObjectMapper mapper = new ObjectMapper(factory);
+ TypeReference<Registry> typeRef
+ = new TypeReference<Registry>() {};
try {
map = mapper.readValue(json, typeRef);
} catch (JsonParseException e) {
@@ -168,9 +168,9 @@ public class DefaultServiceRegistryService implements ServiceRegistry, Service {
LOG.failedToGetMapFromJsonString( json, e );
} catch (IOException e) {
LOG.failedToGetMapFromJsonString( json, e );
- }
+ }
return map;
- }
+ }
@Override
public void init(GatewayConfig config, Map<String, String> options)
http://git-wip-us.apache.org/repos/asf/knox/blob/145ed5dd/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
index 8cf31a5..4a3e754 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/services/security/impl/DefaultAliasService.java
@@ -19,12 +19,12 @@ package org.apache.hadoop.gateway.services.security.impl;
import java.security.KeyStore;
import java.security.KeyStoreException;
+import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
-import java.util.Random;
import org.apache.hadoop.gateway.GatewayMessages;
import org.apache.hadoop.gateway.config.GatewayConfig;
@@ -39,7 +39,7 @@ import org.apache.hadoop.gateway.services.security.MasterService;
public class DefaultAliasService implements AliasService {
private static final GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
- private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
+ private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
@@ -104,21 +104,21 @@ public class DefaultAliasService implements AliasService {
}
private String generatePassword(int length) {
- StringBuffer sb = new StringBuffer();
- Random r = new Random();
+ StringBuilder sb = new StringBuilder();
+ SecureRandom r = new SecureRandom();
for (int i = 0; i < length; i++) {
sb.append(chars[r.nextInt(chars.length)]);
}
return sb.toString();
}
-
+
public void setKeystoreService(KeystoreService ks) {
this.keystoreService = ks;
}
public void setMasterService(MasterService ms) {
this.masterService = ms;
-
+
}
@Override
[14/23] knox git commit: KNOX-1081 - Remove Picketlink Provider Module
Posted by mo...@apache.org.
KNOX-1081 - Remove Picketlink Provider Module
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/92b1505a
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/92b1505a
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/92b1505a
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 92b1505a70057aef762ac20bf80a7249d947e3e9
Parents: 0719da3
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 12 17:28:40 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 12 17:28:40 2017 -0400
----------------------------------------------------------------------
gateway-provider-security-picketlink/pom.xml | 76 --------
.../gateway/picketlink/PicketlinkMessages.java | 40 ----
.../picketlink/deploy/PicketlinkConf.java | 194 -------------------
...PicketlinkFederationProviderContributor.java | 132 -------------
.../filter/CaptureOriginalURLFilter.java | 89 ---------
.../filter/PicketlinkIdentityAdapter.java | 102 ----------
...gateway.deploy.ProviderDeploymentContributor | 19 --
.../gateway/picketlink/PicketlinkTest.java | 30 ---
gateway-release/pom.xml | 4 -
pom.xml | 16 --
10 files changed, 702 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/pom.xml b/gateway-provider-security-picketlink/pom.xml
deleted file mode 100644
index 0e6f1a5..0000000
--- a/gateway-provider-security-picketlink/pom.xml
+++ /dev/null
@@ -1,76 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.apache.knox</groupId>
- <artifactId>gateway</artifactId>
- <version>0.14.0-SNAPSHOT</version>
- </parent>
- <artifactId>gateway-provider-security-picketlink</artifactId>
-
- <name>gateway-provider-security-picketlink</name>
- <description>An extension of the gateway introducing picketlink for SAML integration.</description>
-
- <licenses>
- <license>
- <name>The Apache Software License, Version 2.0</name>
- <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
-
- <dependencies>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-spi</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
- <artifactId>gateway-util-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.picketlink</groupId>
- <artifactId>picketlink-federation</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.jboss.logging</groupId>
- <artifactId>jboss-logging</artifactId>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.easymock</groupId>
- <artifactId>easymock</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.knox</groupId>
- <artifactId>gateway-test-utils</artifactId>
- <scope>test</scope>
- </dependency>
-
- </dependencies>
-
-</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
deleted file mode 100644
index c49030f..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/PicketlinkMessages.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-
-@Messages(logger="org.apache.hadoop.gateway.picketlink")
-public interface PicketlinkMessages {
-
- @Message( level = MessageLevel.DEBUG, text = "Found Original URL in reequest: {0}")
- public void foundOriginalURLInRequest(String url);
-
- @Message( level = MessageLevel.DEBUG, text = "setting cookie for original-url")
- public void settingCookieForOriginalURL();
-
- @Message( level = MessageLevel.DEBUG, text = "Secure Flag is set to False for cookie")
- public void secureFlagFalseForCookie();
-
- @Message( level = MessageLevel.ERROR, text = "Unable to get the gateway identity passphrase: {0}")
- public void unableToGetGatewayIdentityPassphrase(@StackTrace( level = MessageLevel.DEBUG) Exception e);
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
deleted file mode 100644
index 59203c6..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkConf.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.deploy;
-
-/**
- * Provides a serializable configuration file for adding to
- * the webapp as an XML string for picketlink.xml
- *
- */
-public class PicketlinkConf {
- public static final String INDENT = " ";
- public static final String LT_OPEN = "<";
- public static final String LT_CLOSE = "</";
- public static final String GT = ">";
- public static final String GT_CLOSE = "/>";
- public static final String NL = "\n";
- public static final String PICKETLINK_XMLNS = "urn:picketlink:identity-federation:config:2.1";
- public static final String PICKETLINK_SP_XMLNS = "urn:picketlink:identity-federation:config:1.0";
- public static final String C14N_METHOD = "http://www.w3.org/2001/10/xml-exc-c14n#";
- public static final String KEYPROVIDER_ELEMENT = "KeyProvider";
- public static final String KEYPROVIDER_CLASSNAME = "org.picketlink.identity.federation.core.impl.KeyStoreKeyManager";
- public static final String AUTH_HANDLER_CLASSNAME = "org.picketlink.identity.federation.web.handlers.saml2.SAML2AuthenticationHandler";
- public static final String ROLE_GEN_HANDLER_CLASSNAME = "org.picketlink.identity.federation.web.handlers.saml2.RolesGenerationHandler";
- public static final String PICKETLINK_ELEMENT = "PicketLink";
- public static final String PICKETLINKSP_ELEMENT = "PicketLinkSP";
- public static final String HANDLERS_ELEMENT = "Handlers";
- public static final String HANDLER_ELEMENT = "Handler";
- public static final String OPTION_ELEMENT = "Option";
- public static final String VAL_ALIAS_ELEMENT = "ValidatingAlias";
- public static final String AUTH_ELEMENT = "Auth";
-
- private String serverEnvironment = "jetty";
- private String bindingType = "POST";
- private String idpUsesPostingBinding = "true";
- private String supportsSignatures = "true";
- private String identityURL = null;
- private String serviceURL = null;
- private String keystoreURL = null;
- private String keystorePass = null;
- private String signingKeyAlias = null;
- private String signingKeyPass = null;
- private String validatingKeyAlias = null;
- private String validatingKeyValue = null;
- private String nameIDFormat = "urn:oasis:names:tc:SAML:2.0:nameid-format:persistent";
- private String clockSkewMilis = null;
- private String assertionSessionAttributeName = "org.picketlink.sp.assertion";
-
- public String getServerEnvironment() {
- return serverEnvironment;
- }
- public void setServerEnvironment(String serverEnvironment) {
- this.serverEnvironment = serverEnvironment;
- }
- public String getBindingType() {
- return bindingType;
- }
- public void setBindingType(String bindingType) {
- this.bindingType = bindingType;
- }
- public String getIdpUsesPostingBinding() {
- return idpUsesPostingBinding;
- }
- public void setIdpUsesPostingBinding(String idpUsesPostingBinding) {
- this.idpUsesPostingBinding = idpUsesPostingBinding;
- }
- public String getSupportsSignatures() {
- return supportsSignatures;
- }
- public void setSupportsSignatures(String supportsSignatures) {
- this.supportsSignatures = supportsSignatures;
- }
- public String getIdentityURL() {
- return identityURL;
- }
- public void setIdentityURL(String identityURL) {
- this.identityURL = identityURL;
- }
- public String getServiceURL() {
- return serviceURL;
- }
- public void setServiceURL(String serviceURL) {
- this.serviceURL = serviceURL;
- }
- public String getKeystoreURL() {
- return keystoreURL;
- }
- public void setKeystoreURL(String keystoreURL) {
- this.keystoreURL = keystoreURL;
- }
- public String getKeystorePass() {
- return keystorePass;
- }
- public void setKeystorePass(String keystorePass) {
- this.keystorePass = keystorePass;
- }
- public String getSigningKeyAlias() {
- return signingKeyAlias;
- }
- public void setSigningKeyAlias(String signingKeyAlias) {
- this.signingKeyAlias = signingKeyAlias;
- }
- public String getSigningKeyPass() {
- return signingKeyPass;
- }
- public void setSigningKeyPass(String signingKeyPass) {
- this.signingKeyPass = signingKeyPass;
- }
- public String getValidatingKeyAlias() {
- return validatingKeyAlias;
- }
- public void setValidatingAliasKey(String validatingKeyAlias) {
- this.validatingKeyAlias = validatingKeyAlias;
- }
- public String getValidatingKeyValue() {
- return validatingKeyValue;
- }
- public void setValidatingAliasValue(String validatingKeyValue) {
- this.validatingKeyValue = validatingKeyValue;
- }
- public String getNameIDFormat() {
- return nameIDFormat;
- }
- public void setNameIDFormat(String nameIDFormat) {
- this.nameIDFormat = nameIDFormat;
- }
- public String getClockSkewMilis() {
- return clockSkewMilis;
- }
- public void setClockSkewMilis(String clockSkewMilis) {
- this.clockSkewMilis = clockSkewMilis;
- }
- public String getAssertionSessionAttributeName() {
- return assertionSessionAttributeName;
- }
- public void setAssertionSessionAttributeName(
- String assertionSessionAttributeName) {
- this.assertionSessionAttributeName = assertionSessionAttributeName;
- }
- @Override
- public String toString() {
- // THIS IS HORRID REPLACE WITH DOM+TRANSFORM
- StringBuffer xml = new StringBuffer();
- xml.append("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>").append(NL)
- .append(LT_OPEN).append(PICKETLINK_ELEMENT).append(" xmlns=\"").append(PICKETLINK_XMLNS).append("\"" + GT).append(NL)
- .append(INDENT).append(LT_OPEN).append(PICKETLINKSP_ELEMENT).append(" xmlns=\"").append(PICKETLINK_SP_XMLNS + "\"").append(NL)
- .append(INDENT).append(INDENT).append("ServerEnvironment").append("=\"").append(serverEnvironment).append("\"").append(NL)
- .append(INDENT).append(INDENT).append("BindingType").append("=\"").append(bindingType).append("\"").append(NL)
- .append(INDENT).append(INDENT).append("IDPUsesPostBinding").append("=\"").append(idpUsesPostingBinding).append("\"").append(NL)
- .append(INDENT).append(INDENT).append("SupportsSignatures").append("=\"").append(supportsSignatures).append("\"").append(NL)
- .append(INDENT).append(INDENT).append("CanonicalizationMethod").append("=\"").append(C14N_METHOD).append("\"").append(GT).append(NL).append(NL)
- .append(INDENT).append(INDENT).append(LT_OPEN).append("IdentityURL").append(GT).append(identityURL).append(LT_CLOSE).append("IdentityURL").append(GT).append(NL)
- .append(INDENT).append(INDENT).append(LT_OPEN).append("ServiceURL").append(GT).append(serviceURL).append(LT_CLOSE).append("ServiceURL").append(GT).append(NL)
- .append(INDENT).append(INDENT).append(LT_OPEN).append(KEYPROVIDER_ELEMENT).append(" ").append("ClassName=\"").append(KEYPROVIDER_CLASSNAME + "\"" + GT).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"KeyStoreURL\" Value=\"").append(keystoreURL).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"KeyStorePass\" Value=\"").append(keystorePass).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"SigningKeyAlias\" Value=\"").append(signingKeyAlias).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(AUTH_ELEMENT).append(" Key=\"SigningKeyPass\" Value=\"").append(signingKeyPass).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(VAL_ALIAS_ELEMENT).append(" Key=\"").append(validatingKeyAlias).append("\" Value=\"").append(validatingKeyValue).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(LT_CLOSE).append(KEYPROVIDER_ELEMENT).append(GT).append(NL)
- .append(INDENT).append(LT_CLOSE).append(PICKETLINKSP_ELEMENT).append(GT).append(NL)
- .append(INDENT).append(LT_OPEN).append(HANDLERS_ELEMENT).append(GT).append(NL)
- .append(INDENT).append(INDENT).append(LT_OPEN).append(HANDLER_ELEMENT).append(" class=\"").append(AUTH_HANDLER_CLASSNAME).append("\">").append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"NAMEID_FORMAT\" Value=\"").append(nameIDFormat).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"CLOCK_SKEW_MILIS\" Value=\"").append(clockSkewMilis).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(INDENT).append(LT_OPEN).append(OPTION_ELEMENT).append(" Key=\"ASSERTION_SESSION_ATTRIBUTE_NAME\" Value=\"").append(assertionSessionAttributeName).append("\"").append(GT_CLOSE).append(NL)
- .append(INDENT).append(INDENT).append(LT_CLOSE).append(HANDLER_ELEMENT).append(GT).append(NL)
- .append(INDENT).append(INDENT).append(LT_OPEN).append(HANDLER_ELEMENT).append(" class=\"").append(ROLE_GEN_HANDLER_CLASSNAME).append("\"/>").append(NL)
- .append(INDENT).append(LT_CLOSE).append(HANDLERS_ELEMENT).append(GT).append(NL)
- .append(LT_CLOSE).append(PICKETLINK_ELEMENT).append(GT).append(NL);
-
- return xml.toString();
- }
-
- public static void main(String[] args) {
- PicketlinkConf conf = new PicketlinkConf();
- System.out.println(conf.toString());
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
deleted file mode 100644
index 4f90a41..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.deploy;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.picketlink.PicketlinkMessages;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.AliasServiceException;
-import org.apache.hadoop.gateway.services.security.MasterService;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-import org.jboss.shrinkwrap.api.asset.StringAsset;
-import org.picketlink.identity.federation.web.filters.ServiceProviderContextInitializer;
-
-public class PicketlinkFederationProviderContributor extends
- ProviderDeploymentContributorBase {
- private static final String ROLE = "federation";
- private static final String NAME = "Picketlink";
- private static final String PICKETLINK_FILTER_CLASSNAME = "org.picketlink.identity.federation.web.filters.SPFilter";
- private static final String CAPTURE_URL_FILTER_CLASSNAME = "org.apache.hadoop.gateway.picketlink.filter.CaptureOriginalURLFilter";
- private static final String IDENTITY_ADAPTER_CLASSNAME = "org.apache.hadoop.gateway.picketlink.filter.PicketlinkIdentityAdapter";
- private static final String IDENTITY_URL_PARAM = "identity.url";
- private static final String SERVICE_URL_PARAM = "service.url";
- private static final String KEYSTORE_URL_PARAM = "keystore.url";
- private static final String SIGNINGKEY_ALIAS = "gateway-identity";
- private static final String VALIDATING_ALIAS_KEY = "validating.alias.key";
- private static final String VALIDATING_ALIAS_VALUE = "validating.alias.value";
- private static final String CLOCK_SKEW_MILIS = "clock.skew.milis";
- private static PicketlinkMessages log = MessagesFactory.get( PicketlinkMessages.class );
-
- private MasterService ms = null;
- private AliasService as = null;
-
- @Override
- public String getRole() {
- return ROLE;
- }
-
- @Override
- public String getName() {
- return NAME;
- }
-
- public void setMasterService(MasterService ms) {
- this.ms = ms;
- }
-
- public void setAliasService(AliasService as) {
- this.as = as;
- }
-
- @Override
- public void initializeContribution(DeploymentContext context) {
- super.initializeContribution(context);
- }
-
- @Override
- public void contributeProvider(DeploymentContext context, Provider provider) {
- // LJM TODO: consider creating a picketlink configuration provider to
- // handle the keystore secrets without putting them in a config file directly.
- // Once that is done then we can remove the unneeded gateway services from those
- // that are available to providers.
- context.getWebAppDescriptor().createListener().listenerClass( ServiceProviderContextInitializer.class.getName());
-
- PicketlinkConf config = new PicketlinkConf( );
- Map<String,String> params = provider.getParams();
- config.setIdentityURL(params.get(IDENTITY_URL_PARAM));
- config.setServiceURL(params.get(SERVICE_URL_PARAM));
- config.setKeystoreURL(params.get(KEYSTORE_URL_PARAM));
- if (ms != null) {
- config.setKeystorePass(new String(ms.getMasterSecret()));
- }
- config.setSigningKeyAlias(SIGNINGKEY_ALIAS);
- if (as != null) {
- char[] passphrase = null;
- try {
- passphrase = as.getGatewayIdentityPassphrase();
- config.setSigningKeyPass(new String(passphrase));
- } catch (AliasServiceException e) {
- log.unableToGetGatewayIdentityPassphrase(e);
- }
- }
- config.setValidatingAliasKey(params.get(VALIDATING_ALIAS_KEY));
- config.setValidatingAliasValue(params.get(VALIDATING_ALIAS_VALUE));
- config.setClockSkewMilis(params.get(CLOCK_SKEW_MILIS));
- String configStr = config.toString();
- if( config != null ) {
- context.getWebArchive().addAsWebInfResource( new StringAsset( configStr ), "picketlink.xml" );
- }
- }
-
- @Override
- public void contributeFilter(DeploymentContext context, Provider provider, Service service,
- ResourceDescriptor resource, List<FilterParamDescriptor> params) {
- // blindly add all the provider params as filter init params
- if (params == null) {
- params = new ArrayList<FilterParamDescriptor>();
- }
- Map<String, String> providerParams = provider.getParams();
- for(Entry<String, String> entry : providerParams.entrySet()) {
- params.add( resource.createFilterParam().name( entry.getKey().toLowerCase() ).value( entry.getValue() ) );
- }
- resource.addFilter().name( getName() ).role( getRole() ).impl( CAPTURE_URL_FILTER_CLASSNAME ).params( params );
- resource.addFilter().name( getName() ).role( getRole() ).impl( PICKETLINK_FILTER_CLASSNAME ).params( params );
- resource.addFilter().name( getName() ).role( getRole() ).impl( IDENTITY_ADAPTER_CLASSNAME ).params( params );
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
deleted file mode 100644
index 66da6c4..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/CaptureOriginalURLFilter.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.filter;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.Cookie;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.picketlink.PicketlinkMessages;
-
-import java.io.IOException;
-
-public class CaptureOriginalURLFilter implements Filter {
- private static PicketlinkMessages log = MessagesFactory.get( PicketlinkMessages.class );
- private static final String COOKIE_PATH = "cookie.path";
- private static final String COOKIE_SECURE = "cookie.secure";
- private String cookiePath = null;
- private String cookieSecure = null;
-
- @Override
- public void init( FilterConfig filterConfig ) throws ServletException {
- cookiePath = filterConfig.getInitParameter(COOKIE_PATH);
- if (cookiePath == null) {
- cookiePath = "/gateway/idp/knoxsso/api/v1/websso";
- }
- cookieSecure = filterConfig.getInitParameter(COOKIE_SECURE);
- if (cookieSecure == null) {
- cookieSecure = "true";
- }
- }
-
- @Override
- public void doFilter( ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain ) throws IOException, ServletException {
- String original = null;
- HttpServletRequest request = (HttpServletRequest)servletRequest;
- String url = request.getParameter("originalUrl");
- if (url != null) {
- log.foundOriginalURLInRequest(url);
- original = request.getParameter("originalUrl");
- log.settingCookieForOriginalURL();
- addCookie(servletResponse, original);
- }
- filterChain.doFilter(request, servletResponse);
- }
-
- @Override
- public void destroy() {
-
- }
-
- private void addCookie(ServletResponse servletResponse, String original) {
- Cookie c = new Cookie("original-url", original);
- c.setPath(cookiePath);
- c.setHttpOnly(true);
- boolean secureOnly = true;
- if (cookieSecure != null) {
- secureOnly = ("false".equals(cookieSecure) ? false : true);
- if (!secureOnly) {
- log.secureFlagFalseForCookie();
- }
- }
- c.setSecure(secureOnly);
- c.setMaxAge(60);
- ((HttpServletResponse)servletResponse).addCookie(c);
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
deleted file mode 100644
index 333f91d..0000000
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/hadoop/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink.filter;
-
-import java.io.IOException;
-import java.security.Principal;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import javax.security.auth.Subject;
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.hadoop.gateway.audit.api.Action;
-import org.apache.hadoop.gateway.audit.api.ActionOutcome;
-import org.apache.hadoop.gateway.audit.api.AuditService;
-import org.apache.hadoop.gateway.audit.api.AuditServiceFactory;
-import org.apache.hadoop.gateway.audit.api.Auditor;
-import org.apache.hadoop.gateway.audit.api.ResourceType;
-import org.apache.hadoop.gateway.audit.log4j.audit.AuditConstants;
-import org.apache.hadoop.gateway.filter.AbstractGatewayFilter;
-import org.apache.hadoop.gateway.security.PrimaryPrincipal;
-
-public class PicketlinkIdentityAdapter implements Filter {
-
- private static AuditService auditService = AuditServiceFactory.getAuditService();
- private static Auditor auditor = auditService.getAuditor(
- AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
- AuditConstants.KNOX_COMPONENT_NAME );
-
-
- @Override
- public void init( FilterConfig filterConfig ) throws ServletException {
- }
-
- public void destroy() {
- }
-
- public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
- throws IOException, ServletException {
-
- HttpServletRequest httpRequest = (HttpServletRequest) request;
- String username = httpRequest.getUserPrincipal().getName();
- PrimaryPrincipal pp = new PrimaryPrincipal(username);
- Subject subject = new Subject();
- subject.getPrincipals().add(pp);
-
- Principal principal = (Principal) subject.getPrincipals(PrimaryPrincipal.class);
- auditService.getContext().setUsername( principal.getName() );
- String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
- auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS );
-
- doAs(request, response, chain, subject);
- }
-
- private void doAs(final ServletRequest request,
- final ServletResponse response, final FilterChain chain, Subject subject)
- throws IOException, ServletException {
- try {
- Subject.doAs(
- subject,
- new PrivilegedExceptionAction<Object>() {
- public Object run() throws Exception {
- chain.doFilter(request, response);
- return null;
- }
- }
- );
- }
- catch (PrivilegedActionException e) {
- Throwable t = e.getCause();
- if (t instanceof IOException) {
- throw (IOException) t;
- }
- else if (t instanceof ServletException) {
- throw (ServletException) t;
- }
- else {
- throw new ServletException(t);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index ec4affc..0000000
--- a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.picketlink.deploy.PicketlinkFederationProviderContributor
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java b/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
deleted file mode 100644
index 0631eeb..0000000
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/hadoop/gateway/picketlink/PicketlinkTest.java
+++ /dev/null
@@ -1,30 +0,0 @@
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.picketlink;
-
-
-import org.apache.hadoop.gateway.services.security.token.impl.JWTToken;
-import org.junit.Test;
-
-public class PicketlinkTest extends org.junit.Assert {
- @Test
- public void testPicketlink() throws Exception {
- assertTrue(true);
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index cbff307..ad07225 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -241,10 +241,6 @@
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-provider-security-picketlink</artifactId>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-security-shiro</artifactId>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/knox/blob/92b1505a/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e314415..30a052b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,7 +68,6 @@
<module>gateway-provider-identity-assertion-hadoop-groups</module>
<module>gateway-provider-identity-assertion-regex</module>
<module>gateway-provider-identity-assertion-switchcase</module>
- <module>gateway-provider-security-picketlink</module>
<module>gateway-provider-identity-assertion-pseudo</module>
<module>gateway-provider-jersey</module>
<module>gateway-provider-ha</module>
@@ -499,11 +498,6 @@
</dependency>
<dependency>
<groupId>${gateway-group}</groupId>
- <artifactId>gateway-provider-security-picketlink</artifactId>
- <version>${gateway-version}</version>
- </dependency>
- <dependency>
- <groupId>${gateway-group}</groupId>
<artifactId>gateway-provider-security-preauth</artifactId>
<version>${gateway-version}</version>
</dependency>
@@ -733,16 +727,6 @@
<version>${gateway-version}</version>
</dependency>
<dependency>
- <groupId>org.picketlink</groupId>
- <artifactId>picketlink-federation</artifactId>
- <version>2.7.0.CR3</version>
- </dependency>
- <dependency>
- <groupId>org.jboss.logging</groupId>
- <artifactId>jboss-logging</artifactId>
- <version>3.2.0.Final</version>
- </dependency>
- <dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
<version>2.6</version>
[11/23] knox git commit: KNOX-1064 - Externalize Hadoop Service
Configuration Details and Service URL Creation (Phil Zampino via Sandeep
More)
Posted by mo...@apache.org.
KNOX-1064 - Externalize Hadoop Service Configuration Details and Service URL Creation (Phil Zampino via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7b401def
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7b401def
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7b401def
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 7b401def625630cbf1f9ee5f8993bbcb3269c222
Parents: d762ed3
Author: Sandeep More <mo...@apache.org>
Authored: Thu Oct 5 10:19:07 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Oct 5 10:19:07 2017 -0400
----------------------------------------------------------------------
.../discovery/ambari/AmbariCluster.java | 7 +-
.../discovery/ambari/AmbariComponent.java | 27 +-
.../ambari/AmbariDynamicServiceURLCreator.java | 151 ++++
.../ambari/AmbariServiceDiscovery.java | 58 +-
.../ambari/AmbariServiceDiscoveryMessages.java | 64 +-
.../ambari/AmbariServiceURLCreator.java | 184 ----
.../ambari/ConditionalValueHandler.java | 24 +
.../discovery/ambari/PropertyEqualsHandler.java | 76 ++
.../ambari/ServiceURLPropertyConfig.java | 324 +++++++
.../discovery/ambari/SimpleValueHandler.java | 32 +
...iscovery-component-config-mapping.properties | 36 +
.../ambari-service-discovery-url-mappings.xml | 398 +++++++++
.../AmbariDynamicServiceURLCreatorTest.java | 876 +++++++++++++++++++
.../ambari/AmbariServiceDiscoveryTest.java | 4 +-
.../simple/SimpleDescriptorHandler.java | 68 +-
.../simple/SimpleDescriptorMessages.java | 8 +-
.../simple/SimpleDescriptorHandlerTest.java | 327 +++++--
17 files changed, 2335 insertions(+), 329 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
index 6eaabd3..eb84433 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariCluster.java
@@ -27,7 +27,7 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
private String name = null;
- private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
+ private AmbariDynamicServiceURLCreator urlCreator;
private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
@@ -36,7 +36,8 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
AmbariCluster(String name) {
this.name = name;
- components = new HashMap<String, AmbariComponent>();
+ components = new HashMap<>();
+ urlCreator = new AmbariDynamicServiceURLCreator(this);
}
void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
@@ -81,7 +82,7 @@ class AmbariCluster implements ServiceDiscovery.Cluster {
@Override
public List<String> getServiceURLs(String serviceName) {
List<String> urls = new ArrayList<>();
- urls.addAll(urlCreator.create(this, serviceName));
+ urls.addAll(urlCreator.create(serviceName));
return urls;
}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
index 55257fb..d9d5b03 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariComponent.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.gateway.topology.discovery.ambari;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -27,7 +28,7 @@ class AmbariComponent {
private String name = null;
private String version = null;
- private List<String> hostNames = null;
+ private List<String> hostNames = new ArrayList<>();
private Map<String, String> properties = null;
@@ -41,35 +42,43 @@ class AmbariComponent {
this.serviceName = service;
this.clusterName = cluster;
this.version = version;
- this.hostNames = hostNames;
this.properties = properties;
+
+ if (hostNames != null) {
+ // Add the hostnames individually to prevent adding any null values
+ for (String hostName : hostNames) {
+ if (hostName != null) {
+ this.hostNames.add(hostName);
+ }
+ }
+ }
}
- public String getVersion() {
+ String getVersion() {
return version;
}
- public String getName() {
+ String getName() {
return name;
}
- public String getServiceName() {
+ String getServiceName() {
return serviceName;
}
- public String getClusterName() {
+ String getClusterName() {
return clusterName;
}
- public List<String> getHostNames() {
+ List<String> getHostNames() {
return hostNames;
}
- public Map<String, String> getConfigProperties() {
+ Map<String, String> getConfigProperties() {
return properties;
}
- public String getConfigProperty(String propertyName) {
+ String getConfigProperty(String propertyName) {
return properties.get(propertyName);
}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
new file mode 100644
index 0000000..ed5d3e7
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+class AmbariDynamicServiceURLCreator {
+
+ static final String MAPPING_CONFIG_OVERRIDE_PROPERTY = "org.apache.gateway.topology.discovery.ambari.config";
+
+ private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+ private AmbariCluster cluster = null;
+ private ServiceURLPropertyConfig config;
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster) {
+ this.cluster = cluster;
+
+ String mappingConfiguration = System.getProperty(MAPPING_CONFIG_OVERRIDE_PROPERTY);
+ if (mappingConfiguration != null) {
+ File mappingConfigFile = new File(mappingConfiguration);
+ if (mappingConfigFile.exists()) {
+ try {
+ config = new ServiceURLPropertyConfig(mappingConfigFile);
+ log.loadedComponentConfigMappings(mappingConfigFile.getAbsolutePath());
+ } catch (Exception e) {
+ log.failedToLoadComponentConfigMappings(mappingConfigFile.getAbsolutePath(), e);
+ }
+ }
+ }
+
+ // If there is no valid override configured, fall-back to the internal mapping configuration
+ if (config == null) {
+ config = new ServiceURLPropertyConfig();
+ }
+ }
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster, File mappingConfiguration) throws IOException {
+ this.cluster = cluster;
+ config = new ServiceURLPropertyConfig(new FileInputStream(mappingConfiguration));
+ }
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster, String mappings) {
+ this.cluster = cluster;
+ config = new ServiceURLPropertyConfig(new ByteArrayInputStream(mappings.getBytes()));
+ }
+
+ List<String> create(String serviceName) {
+ List<String> urls = new ArrayList<>();
+
+ Map<String, String> placeholderValues = new HashMap<>();
+ List<String> componentHostnames = new ArrayList<>();
+ String hostNamePlaceholder = null;
+
+ ServiceURLPropertyConfig.URLPattern pattern = config.getURLPattern(serviceName);
+ if (pattern != null) {
+ for (String propertyName : pattern.getPlaceholders()) {
+ ServiceURLPropertyConfig.Property configProperty = config.getConfigProperty(serviceName, propertyName);
+
+ String propertyValue = null;
+ String propertyType = configProperty.getType();
+ if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+ log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
+ AmbariCluster.ServiceConfiguration svcConfig =
+ cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
+ if (svcConfig != null) {
+ propertyValue = svcConfig.getProperties().get(configProperty.getValue());
+ }
+ } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+ String compName = configProperty.getComponent();
+ if (compName != null) {
+ AmbariComponent component = cluster.getComponent(compName);
+ if (component != null) {
+ if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
+ log.lookingUpComponentHosts(compName);
+ componentHostnames.addAll(component.getHostNames());
+ hostNamePlaceholder = propertyName; // Remember the host name placeholder
+ } else {
+ log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
+ propertyValue = component.getConfigProperty(configProperty.getValue());
+ }
+ }
+ }
+ } else { // Derived property
+ log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
+ ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
+ propertyValue = p.getValue();
+ if (propertyValue == null) {
+ if (p.getConditionHandler() != null) {
+ propertyValue = p.getConditionHandler().evaluate(config, cluster);
+ }
+ }
+ }
+
+ log.determinedPropertyValue(configProperty.getName(), propertyValue);
+ placeholderValues.put(configProperty.getName(), propertyValue);
+ }
+
+ // For patterns with a placeholder value for the hostname (e.g., multiple URL scenarios)
+ if (!componentHostnames.isEmpty()) {
+ for (String componentHostname : componentHostnames) {
+ String url = pattern.get().replace("{" + hostNamePlaceholder + "}", componentHostname);
+ urls.add(createURL(url, placeholderValues));
+ }
+ } else { // Single URL result case
+ urls.add(createURL(pattern.get(), placeholderValues));
+ }
+ }
+
+ return urls;
+ }
+
+ private String createURL(String pattern, Map<String, String> placeholderValues) {
+ String url = null;
+ if (pattern != null) {
+ url = pattern;
+ for (String placeHolder : placeholderValues.keySet()) {
+ String value = placeholderValues.get(placeHolder);
+ if (value != null) {
+ url = url.replace("{" + placeHolder + "}", value);
+ }
+ }
+ }
+ return url;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index 34f20a7..37f68ae 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@ -16,6 +16,13 @@
*/
package org.apache.hadoop.gateway.topology.discovery.ambari;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
import net.minidev.json.JSONArray;
import net.minidev.json.JSONObject;
import net.minidev.json.JSONValue;
@@ -34,9 +41,6 @@ import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
-import java.io.IOException;
-import java.util.*;
-
class AmbariServiceDiscovery implements ServiceDiscovery {
@@ -50,31 +54,33 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
static final String AMBARI_SERVICECONFIGS_URI =
AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+ private static final String COMPONENT_CONFIG_MAPPING_FILE =
+ "ambari-service-discovery-component-config-mapping.properties";
+
+ private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
// Map of component names to service configuration types
private static Map<String, String> componentServiceConfigs = new HashMap<>();
static {
- componentServiceConfigs.put("NAMENODE", "hdfs-site");
- componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
- componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
- componentServiceConfigs.put("HIVE_SERVER", "hive-site");
- componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
- componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
- } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
+ try {
+ Properties configMapping = new Properties();
+ configMapping.load(AmbariServiceDiscovery.class.getClassLoader().getResourceAsStream(COMPONENT_CONFIG_MAPPING_FILE));
+ for (String componentName : configMapping.stringPropertyNames()) {
+ componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
+ }
+ } catch (Exception e) {
+ log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
+ }
+ }
private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
private static final String DEFAULT_PWD_ALIAS = "ambari.discovery.password";
- private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
- private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
@GatewayService
private AliasService aliasService;
private CloseableHttpClient httpClient = null;
- private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
AmbariServiceDiscovery() {
httpClient = org.apache.http.impl.client.HttpClients.createDefault();
@@ -141,13 +147,21 @@ class AmbariServiceDiscovery implements ServiceDiscovery {
serviceComponents.put(componentName, serviceName);
-// String hostName = (String) hostRoles.get("host_name");
- String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
- log.discoveredServiceHost(serviceName, hostName);
- if (!componentHostNames.containsKey(componentName)) {
- componentHostNames.put(componentName, new ArrayList<String>());
+ // Assuming public host name is more applicable than host_name
+ String hostName = (String) hostRoles.get("public_host_name");
+ if (hostName == null) {
+ // Some (even slightly) older versions of Ambari/HDP do not return public_host_name,
+ // so fall back to host_name in those cases.
+ hostName = (String) hostRoles.get("host_name");
+ }
+
+ if (hostName != null) {
+ log.discoveredServiceHost(serviceName, hostName);
+ if (!componentHostNames.containsKey(componentName)) {
+ componentHostNames.put(componentName, new ArrayList<String>());
+ }
+ componentHostNames.get(componentName).add(hostName);
}
- componentHostNames.get(componentName).add(hostName);
}
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index caa16ed..0661224 100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@ -21,61 +21,101 @@ import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
import org.apache.hadoop.gateway.i18n.messages.Messages;
import org.apache.hadoop.gateway.i18n.messages.StackTrace;
-@Messages(logger="org.apache.gateway.topology.discovery.ambari")
+@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
public interface AmbariServiceDiscoveryMessages {
@Message(level = MessageLevel.ERROR,
- text = "Encountered an error during cluster {0} discovery: {1}")
+ text = "Failed to load service discovery configuration: {1}")
+ void failedToLoadServiceDiscoveryConfiguration(@StackTrace(level = MessageLevel.ERROR) Exception e);
+
+ @Message(level = MessageLevel.ERROR,
+ text = "Failed to load service discovery configuration {0}: {1}")
+ void failedToLoadServiceDiscoveryConfiguration(final String configuration,
+ @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+ @Message(level = MessageLevel.ERROR,
+ text = "Encountered an error during cluster {0} discovery: {1}")
void clusterDiscoveryError(final String clusterName,
@StackTrace(level = MessageLevel.ERROR) Exception e);
@Message(level = MessageLevel.DEBUG,
- text = "REST invocation {0} failed: {1}")
+ text = "REST invocation {0} failed: {1}")
void restInvocationError(final String url,
@StackTrace(level = MessageLevel.ERROR) Exception e);
@Message(level = MessageLevel.ERROR,
- text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+ text = "Encountered an error attempting to determine the user for alias {0} : {1}")
void aliasServiceUserError(final String alias, final String error);
@Message(level = MessageLevel.ERROR,
- text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+ text = "Encountered an error attempting to determine the password for alias {0} : {1}")
void aliasServicePasswordError(final String alias, final String error);
@Message(level = MessageLevel.ERROR,
- text = "No user configured for Ambari service discovery.")
+ text = "No user configured for Ambari service discovery.")
void aliasServiceUserNotFound();
@Message(level = MessageLevel.ERROR,
- text = "No password configured for Ambari service discovery.")
+ text = "No password configured for Ambari service discovery.")
void aliasServicePasswordNotFound();
@Message(level = MessageLevel.ERROR,
- text = "Unexpected REST invocation response code for {0} : {1}")
+ text = "Unexpected REST invocation response code for {0} : {1}")
void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
@Message(level = MessageLevel.ERROR,
- text = "REST invocation {0} yielded a response without any JSON.")
+ text = "REST invocation {0} yielded a response without any JSON.")
void noJSON(final String url);
@Message(level = MessageLevel.DEBUG,
- text = "REST invocation result: {0}")
+ text = "REST invocation result: {0}")
void debugJSON(final String json);
+ @Message(level = MessageLevel.DEBUG,
+ text = "Loaded component configuration mappings: {0}")
+ void loadedComponentConfigMappings(final String mappings);
- @Message(level = MessageLevel.INFO,
- text = "Discovered: Service: {0}, Host: {1}")
+ @Message(level = MessageLevel.ERROR,
+ text = "Failed to load component configuration property mappings {0}: {1}")
+ void failedToLoadComponentConfigMappings(final String mappings,
+ @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+ @Message(level = MessageLevel.DEBUG,
+ text = "Discovered: Service: {0}, Host: {1}")
void discoveredServiceHost(final String serviceName, final String hostName);
+ @Message(level = MessageLevel.DEBUG,
+ text = "Querying the cluster for the {0} configuration ({1}) property: {2}")
+ void lookingUpServiceConfigProperty(final String serviceName, final String configType, final String propertyName);
+
+
+ @Message(level = MessageLevel.DEBUG,
+ text = "Querying the cluster for the {0} component configuration property: {1}")
+ void lookingUpComponentConfigProperty(final String componentName, final String propertyName);
+
+
+ @Message(level = MessageLevel.DEBUG,
+ text = "Querying the cluster for the {0} component's hosts")
+ void lookingUpComponentHosts(final String componentName);
+
+
+ @Message(level = MessageLevel.DEBUG,
+ text = "Handling a derived service URL mapping property for the {0} service: type = {1}, name = {2}")
+ void handlingDerivedProperty(final String serviceName, final String propertyType, final String propertyName);
+
+
+ @Message(level = MessageLevel.DEBUG,
+ text = "Determined the service URL mapping property {0} value: {1}")
+ void determinedPropertyValue(final String propertyName, final String propertyValue);
}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index 0674642..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-import java.util.ArrayList;
-import java.util.List;
-
-class AmbariServiceURLCreator {
-
- private static final String NAMENODE_SERVICE = "NAMENODE";
- private static final String JOBTRACKER_SERVICE = "JOBTRACKER";
- private static final String WEBHDFS_SERVICE = "WEBHDFS";
- private static final String WEBHCAT_SERVICE = "WEBHCAT";
- private static final String OOZIE_SERVICE = "OOZIE";
- private static final String WEBHBASE_SERVICE = "WEBHBASE";
- private static final String HIVE_SERVICE = "HIVE";
- private static final String RESOURCEMANAGER_SERVICE = "RESOURCEMANAGER";
-
-
- /**
- * Derive the endpoint URL(s) for the specified service, based on the info from the specified Cluster.
- *
- * @param cluster The cluster discovery results
- * @param serviceName The name of a Hadoop service
- *
- * @return One or more endpoint URLs for the specified service.
- */
- public List<String> create(AmbariCluster cluster, String serviceName) {
- List<String> result = null;
-
- if (NAMENODE_SERVICE.equals(serviceName)) {
- result = createNameNodeURL(cluster);
- } else if (JOBTRACKER_SERVICE.equals(serviceName)) {
- result = createJobTrackerURL(cluster);
- } else if (WEBHDFS_SERVICE.equals(serviceName)) {
- result = createWebHDFSURL(cluster);
- } else if (WEBHCAT_SERVICE.equals(serviceName)) {
- result = createWebHCatURL(cluster);
- } else if (OOZIE_SERVICE.equals(serviceName)) {
- result = createOozieURL(cluster);
- } else if (WEBHBASE_SERVICE.equals(serviceName)) {
- result = createWebHBaseURL(cluster);
- } else if (HIVE_SERVICE.equals(serviceName)) {
- result = createHiveURL(cluster);
- } else if (RESOURCEMANAGER_SERVICE.equals(serviceName)) {
- result = createResourceManagerURL(cluster);
- }
-
- return result;
- }
-
-
- private List<String> createNameNodeURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent comp = cluster.getComponent("NAMENODE");
- if (comp != null) {
- result.add("hdfs://" + comp.getConfigProperty("dfs.namenode.rpc-address"));
- }
-
- return result;
- }
-
-
- private List<String> createJobTrackerURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent comp = cluster.getComponent("RESOURCEMANAGER");
- if (comp != null) {
- result.add("rpc://" + comp.getConfigProperty("yarn.resourcemanager.address"));
- }
-
- return result;
- }
-
-
- private List<String> createWebHDFSURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariCluster.ServiceConfiguration sc = cluster.getServiceConfiguration("HDFS", "hdfs-site");
- if (sc != null) {
- String address = sc.getProperties().get("dfs.namenode.http-address");
- result.add("http://" + address + "/webhdfs");
- }
-
- return result;
- }
-
-
- private List<String> createWebHCatURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent webhcat = cluster.getComponent("WEBHCAT_SERVER");
- if (webhcat != null) {
- String port = webhcat.getConfigProperty("templeton.port");
- String host = webhcat.getHostNames().get(0);
-
- result.add("http://" + host + ":" + port + "/templeton");
- }
- return result;
- }
-
-
- private List<String> createOozieURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent comp = cluster.getComponent("OOZIE_SERVER");
- if (comp != null) {
- result.add(comp.getConfigProperty("oozie.base.url"));
- }
-
- return result;
- }
-
-
- private List<String> createWebHBaseURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent comp = cluster.getComponent("HBASE_MASTER");
- if (comp != null) {
- for (String host : comp.getHostNames()) {
- result.add("http://" + host + ":60080");
- }
- }
-
- return result;
- }
-
-
- private List<String> createHiveURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent hive = cluster.getComponent("HIVE_SERVER");
- if (hive != null) {
- String path = hive.getConfigProperty("hive.server2.thrift.http.path");
- String port = hive.getConfigProperty("hive.server2.thrift.http.port");
- String transport = hive.getConfigProperty("hive.server2.transport.mode");
- String useSSL = hive.getConfigProperty("hive.server2.use.SSL");
- String host = hive.getHostNames().get(0);
-
- String scheme = null; // What is the scheme for the binary transport mode?
- if ("http".equals(transport)) {
- scheme = Boolean.valueOf(useSSL) ? "https" : "http";
- }
-
- result.add(scheme + "://" + host + ":" + port + "/" + path);
- }
- return result;
- }
-
-
- private List<String> createResourceManagerURL(AmbariCluster cluster) {
- List<String> result = new ArrayList<>();
-
- AmbariComponent resMan = cluster.getComponent("RESOURCEMANAGER");
- if (resMan != null) {
- String webappAddress = resMan.getConfigProperty("yarn.resourcemanager.webapp.address");
- String httpPolicy = resMan.getConfigProperty("yarn.http.policy");
- String scheme = ("HTTPS_ONLY".equalsIgnoreCase(httpPolicy)) ? "https" : "http";
-
- result.add(scheme + "://" + webappAddress + "/ws");
- }
-
- return result;
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
new file mode 100644
index 0000000..d76a161
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
/**
 * A handler that resolves a service URL mapping value against the discovered
 * cluster model. Implementations may return a fixed value or evaluate a
 * condition (see PropertyEqualsHandler) to choose between alternatives.
 */
interface ConditionalValueHandler {

    /**
     * Evaluate this handler against the given configuration and cluster.
     *
     * @param config  the service URL property mapping configuration
     * @param cluster the Ambari cluster discovery results
     * @return the resolved value, or null if it could not be determined
     */
    String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster);

}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
new file mode 100644
index 0000000..642a676
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+class PropertyEqualsHandler implements ConditionalValueHandler {
+
+ private String serviceName = null;
+ private String propertyName = null;
+ private String propertyValue = null;
+ private ConditionalValueHandler affirmativeResult = null;
+ private ConditionalValueHandler negativeResult = null;
+
+ PropertyEqualsHandler(String serviceName,
+ String propertyName,
+ String propertyValue,
+ ConditionalValueHandler affirmativeResult,
+ ConditionalValueHandler negativeResult) {
+ this.serviceName = serviceName;
+ this.propertyName = propertyName;
+ this.propertyValue = propertyValue;
+ this.affirmativeResult = affirmativeResult;
+ this.negativeResult = negativeResult;
+ }
+
+ @Override
+ public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+ String result = null;
+
+ ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, propertyName);
+ if (p != null) {
+ String value = getActualPropertyValue(cluster, p);
+ if (propertyValue.equals(value)) {
+ result = affirmativeResult.evaluate(config, cluster);
+ } else if (negativeResult != null) {
+ result = negativeResult.evaluate(config, cluster);
+ }
+
+ // Check if the result is a reference to a local derived property
+ ServiceURLPropertyConfig.Property derived = config.getConfigProperty(serviceName, result);
+ if (derived != null) {
+ result = getActualPropertyValue(cluster, derived);
+ }
+ }
+
+ return result;
+ }
+
+ private String getActualPropertyValue(AmbariCluster cluster, ServiceURLPropertyConfig.Property property) {
+ String value = null;
+ String propertyType = property.getType();
+ if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+ AmbariComponent component = cluster.getComponent(property.getComponent());
+ if (component != null) {
+ value = component.getConfigProperty(property.getValue());
+ }
+ } else if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+ value = cluster.getServiceConfiguration(property.getService(), property.getServiceConfig()).getProperties().get(property.getValue());
+ }
+ return value;
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
new file mode 100644
index 0000000..3330cc3
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
@@ -0,0 +1,324 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
+import org.apache.hadoop.gateway.util.XmlUtils;
+import org.w3c.dom.Document;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
/**
 * Service URL pattern mapping configuration model.
 *
 * Parses an XML document (by default ambari-service-discovery-url-mappings.xml
 * from the classpath) describing, per service, a URL pattern with {PLACEHOLDER}
 * tokens and the properties used to resolve those placeholders against an
 * Ambari cluster's discovered configuration.
 */
class ServiceURLPropertyConfig {

    private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);

    private static final String ATTR_NAME = "name";

    // Compiled once; reused for every parsed mapping document.
    private static XPathExpression SERVICE_URL_PATTERN_MAPPINGS;
    private static XPathExpression URL_PATTERN;
    private static XPathExpression PROPERTIES;
    static {
        XPath xpath = XPathFactory.newInstance().newXPath();
        try {
            SERVICE_URL_PATTERN_MAPPINGS = xpath.compile("/service-discovery-url-mappings/service");
            URL_PATTERN = xpath.compile("url-pattern/text()");
            PROPERTIES = xpath.compile("properties/property");
        } catch (XPathExpressionException e) {
            // NOTE(review): a compile failure here leaves the expressions null and
            // is only printed, not logged; subsequent use would NPE — consider
            // routing through the discovery messages logger instead.
            e.printStackTrace();
        }
    }

    private static final String DEFAULT_SERVICE_URL_MAPPINGS = "ambari-service-discovery-url-mappings.xml";

    // service name -> URL pattern with placeholders
    private Map<String, URLPattern> urlPatterns = new HashMap<>();

    // service name -> (property name -> property definition)
    private Map<String, Map<String, Property>> properties = new HashMap<>();


    /**
     * The default service URL pattern to property mapping configuration will be used.
     */
    ServiceURLPropertyConfig() {
        this(ServiceURLPropertyConfig.class.getClassLoader().getResourceAsStream(DEFAULT_SERVICE_URL_MAPPINGS));
    }

    /**
     * Load the service URL pattern to property mapping configuration from the
     * specified file.
     *
     * @param mappingConfigurationFile the XML mapping configuration file
     * @throws Exception if the file cannot be opened
     */
    ServiceURLPropertyConfig(File mappingConfigurationFile) throws Exception {
        this(new FileInputStream(mappingConfigurationFile));
    }

    /**
     * Load the service URL pattern to property mapping configuration from the
     * specified stream. The stream is always closed on return.
     *
     * @param source An InputStream for the XML content
     */
    ServiceURLPropertyConfig(InputStream source) {
        // Parse the XML, and build the model
        try {
            Document doc = XmlUtils.readXml(source);

            NodeList serviceNodes =
                (NodeList) SERVICE_URL_PATTERN_MAPPINGS.evaluate(doc, XPathConstants.NODESET);
            for (int i=0; i < serviceNodes.getLength(); i++) {
                Node serviceNode = serviceNodes.item(i);
                String serviceName = serviceNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
                properties.put(serviceName, new HashMap<String, Property>());

                Node urlPatternNode = (Node) URL_PATTERN.evaluate(serviceNode, XPathConstants.NODE);
                if (urlPatternNode != null) {
                    urlPatterns.put(serviceName, new URLPattern(urlPatternNode.getNodeValue()));
                }

                NodeList propertiesNode = (NodeList) PROPERTIES.evaluate(serviceNode, XPathConstants.NODESET);
                if (propertiesNode != null) {
                    processProperties(serviceName, propertiesNode);
                }
            }
        } catch (Exception e) {
            // A failed parse yields an empty (but usable) model.
            log.failedToLoadServiceDiscoveryConfiguration(e);
        } finally {
            try {
                source.close();
            } catch (IOException e) {
                // Ignore
            }
        }
    }

    /** Build Property models for one service's &lt;property&gt; elements. */
    private void processProperties(String serviceName, NodeList propertyNodes) {
        for (int i = 0; i < propertyNodes.getLength(); i++) {
            Property p = Property.createProperty(serviceName, propertyNodes.item(i));
            properties.get(serviceName).put(p.getName(), p);
        }
    }

    /** @return the URL pattern for the named service, or null if unmapped. */
    URLPattern getURLPattern(String service) {
        return urlPatterns.get(service);
    }

    // NOTE(review): assumes the service is present in the map; an unmapped
    // service name would NPE here — callers appear to use known services only.
    Property getConfigProperty(String service, String property) {
        return properties.get(service).get(property);
    }

    /**
     * A URL template containing {NAME} placeholders, plus the extracted
     * placeholder names in order of appearance.
     */
    static class URLPattern {
        String pattern;
        List<String> placeholders = new ArrayList<>();

        URLPattern(String pattern) {
            this.pattern = pattern;

            // Collect every {NAME} token from the pattern.
            final Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
            final Matcher matcher = regex.matcher(pattern);
            while( matcher.find() ){
                placeholders.add(matcher.group(1));
            }
        }

        String get() {return pattern; }
        List<String> getPlaceholders() {
            return placeholders;
        }
    }

    /**
     * One mapping property: identifies where its runtime value comes from —
     * a service configuration (TYPE_SERVICE), a component configuration
     * (TYPE_COMPONENT), or a conditional derivation (TYPE_DERIVED).
     */
    static class Property {
        static final String TYPE_SERVICE   = "SERVICE";
        static final String TYPE_COMPONENT = "COMPONENT";
        static final String TYPE_DERIVED   = "DERIVED";

        // Sentinel value meaning "use the component's host name".
        static final String PROP_COMP_HOSTNAME = "component.host.name";

        static final String ATTR_NAME = "name";
        static final String ATTR_PROPERTY = "property";
        static final String ATTR_VALUE = "value";

        // Compiled once; relative expressions evaluated per property node.
        static XPathExpression HOSTNAME;
        static XPathExpression SERVICE_CONFIG;
        static XPathExpression COMPONENT;
        static XPathExpression CONFIG_PROPERTY;
        static XPathExpression IF;
        static XPathExpression THEN;
        static XPathExpression ELSE;
        static XPathExpression TEXT;
        static {
            XPath xpath = XPathFactory.newInstance().newXPath();
            try {
                HOSTNAME        = xpath.compile("hostname");
                SERVICE_CONFIG  = xpath.compile("service-config");
                COMPONENT       = xpath.compile("component");
                CONFIG_PROPERTY = xpath.compile("config-property");
                IF   = xpath.compile("if");
                THEN = xpath.compile("then");
                ELSE = xpath.compile("else");
                TEXT = xpath.compile("text()");
            } catch (XPathExpressionException e) {
                // NOTE(review): see the class-level static initializer — same
                // print-only failure handling.
                e.printStackTrace();
            }
        }


        String type;
        String name;
        String component;
        String service;
        String serviceConfig;
        String value;
        ConditionalValueHandler conditionHandler = null;

        private Property(String type,
                         String propertyName,
                         String component,
                         String service,
                         String configType,
                         String value,
                         ConditionalValueHandler pch) {
            this.type = type;
            this.name = propertyName;
            this.service = service;
            this.component = component;
            this.serviceConfig = configType;
            this.value = value;
            conditionHandler = pch;
        }

        /**
         * Build a Property model from a &lt;property&gt; XML element; the child
         * elements determine the property type and its value source.
         */
        static Property createProperty(String serviceName, Node propertyNode) {
            String propertyName = propertyNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
            String propertyType = null;
            String serviceType = null;
            String configType = null;
            String componentType = null;
            String value = null;
            ConditionalValueHandler pch = null;

            try {
                // <hostname/> marks a "use the component host name" property.
                Node hostNameNode = (Node) HOSTNAME.evaluate(propertyNode, XPathConstants.NODE);
                if (hostNameNode != null) {
                    value = PROP_COMP_HOSTNAME;
                }

                // Check for a service-config node
                Node scNode = (Node) SERVICE_CONFIG.evaluate(propertyNode, XPathConstants.NODE);
                if (scNode != null) {
                    // Service config property
                    propertyType = Property.TYPE_SERVICE;
                    serviceType = scNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
                    Node scTextNode = (Node) TEXT.evaluate(scNode, XPathConstants.NODE);
                    configType = scTextNode.getNodeValue();
                } else { // If not service-config node, check for a component config node
                    Node cNode = (Node) COMPONENT.evaluate(propertyNode, XPathConstants.NODE);
                    if (cNode != null) {
                        // Component config property
                        propertyType = Property.TYPE_COMPONENT;
                        componentType = cNode.getFirstChild().getNodeValue();
                        Node cTextNode = (Node) TEXT.evaluate(cNode, XPathConstants.NODE);
                        // NOTE(review): both assignments below read the same text
                        // node as the line above (the component element's text), so
                        // configType is set to the component name and the first
                        // componentType assignment is overwritten with an equal
                        // value — looks redundant/suspect; verify intent.
                        configType = cTextNode.getNodeValue();
                        componentType = cTextNode.getNodeValue();
                    }
                }

                // Check for a config property node
                Node cpNode = (Node) CONFIG_PROPERTY.evaluate(propertyNode, XPathConstants.NODE);
                if (cpNode != null) {
                    // Check for a condition element
                    Node ifNode = (Node) IF.evaluate(cpNode, XPathConstants.NODE);
                    if (ifNode != null) {
                        propertyType = TYPE_DERIVED;
                        pch = getConditionHandler(serviceName, ifNode);
                    } else {
                        Node cpTextNode = (Node) TEXT.evaluate(cpNode, XPathConstants.NODE);
                        value = cpTextNode.getNodeValue();
                    }
                }
            } catch (Exception e) {
                // NOTE(review): parse failures are only printed; the property is
                // still created with whatever was resolved before the failure.
                e.printStackTrace();
            }

            // Create and return the property representation
            return new Property(propertyType, propertyName, componentType, serviceType, configType, value, pch);
        }

        /**
         * Recursively translate an &lt;if&gt; element (with optional nested
         * &lt;then&gt;/&lt;else&gt; conditions) into a ConditionalValueHandler tree.
         */
        private static ConditionalValueHandler getConditionHandler(String serviceName, Node ifNode) throws Exception {
            ConditionalValueHandler result = null;

            if (ifNode != null) {
                NamedNodeMap attrs = ifNode.getAttributes();
                String comparisonPropName = attrs.getNamedItem(ATTR_PROPERTY).getNodeValue();
                String comparisonValue = attrs.getNamedItem(ATTR_VALUE).getNodeValue();

                ConditionalValueHandler affirmativeResult = null;
                Node thenNode = (Node) THEN.evaluate(ifNode, XPathConstants.NODE);
                if (thenNode != null) {
                    Node subIfNode = (Node) IF.evaluate(thenNode, XPathConstants.NODE);
                    if (subIfNode != null) {
                        affirmativeResult = getConditionHandler(serviceName, subIfNode);
                    } else {
                        affirmativeResult = new SimpleValueHandler(thenNode.getFirstChild().getNodeValue());
                    }
                }

                ConditionalValueHandler negativeResult = null;
                Node elseNode = (Node) ELSE.evaluate(ifNode, XPathConstants.NODE);
                if (elseNode != null) {
                    Node subIfNode = (Node) IF.evaluate(elseNode, XPathConstants.NODE);
                    if (subIfNode != null) {
                        negativeResult = getConditionHandler(serviceName, subIfNode);
                    } else {
                        negativeResult = new SimpleValueHandler(elseNode.getFirstChild().getNodeValue());
                    }
                }

                result = new PropertyEqualsHandler(serviceName,
                                                   comparisonPropName,
                                                   comparisonValue,
                                                   affirmativeResult,
                                                   negativeResult);
            }

            return result;
        }

        String getType() { return type; }
        String getName() { return name; }
        String getComponent() { return component; }
        String getService() { return service; }
        String getServiceConfig() { return serviceConfig; }
        String getValue() {
            return value;
        }
        ConditionalValueHandler getConditionHandler() { return conditionHandler; }
    }
}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
new file mode 100644
index 0000000..8e0cd75
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+
+class SimpleValueHandler implements ConditionalValueHandler {
+ private String value;
+
+ SimpleValueHandler(String value) {
+ this.value = value;
+ }
+
+ @Override
+ public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+ return value;
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
new file mode 100644
index 0000000..a48b28c
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
@@ -0,0 +1,36 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+##########################################################################
+# Hadoop service component configuration mappings
+# Service component name = service component configuration type
+##########################################################################
+NAMENODE=hdfs-site
+RESOURCEMANAGER=yarn-site
+OOZIE_SERVER=oozie-site
+HIVE_SERVER=hive-site
+WEBHCAT_SERVER=webhcat-site
+HBASE_MASTER=hbase-site
+DRUID_COORDINATOR=druid-coordinator
+DRUID_BROKER=druid-broker
+DRUID_ROUTER=druid-router
+DRUID_OVERLORD=druid-overlord
+DRUID_SUPERSET=druid-superset
+ATLAS_SERVER=application-properties
+ZEPPELIN_MASTER=zeppelin-config
+#RANGER=TODO
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
new file mode 100644
index 0000000..8953b8d
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/resources/ambari-service-discovery-url-mappings.xml
@@ -0,0 +1,398 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<!-- ==================================================================== -->
+<!-- Externalized mapping of Hadoop service URL patterns and the Ambari -->
+<!-- ServiceDiscovery model properties. This configuration is used by -->
+<!-- the Ambari ServiceDiscovery implementation to construct service -->
+<!-- URLs suitable for use in a Knox topology file. -->
+<!-- ==================================================================== -->
+<service-discovery-url-mappings>
+
+ <service name="NAMENODE">
+ <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>
+ <properties>
+ <property name="DFS_NAMENODE_RPC_ADDRESS">
+ <component>NAMENODE</component>
+ <config-property>dfs.namenode.rpc-address</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="JOBTRACKER">
+ <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>
+ <properties>
+ <property name="YARN_RM_ADDRESS">
+ <component>RESOURCEMANAGER</component>
+ <config-property>yarn.resourcemanager.address</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="WEBHDFS">
+ <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>
+ <properties>
+ <property name="WEBHDFS_ADDRESS">
+ <service-config name="HDFS">hdfs-site</service-config>
+ <config-property>dfs.namenode.http-address</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="WEBHCAT">
+ <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>WEBHCAT_SERVER</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>WEBHCAT_SERVER</component>
+ <config-property>templeton.port</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="OOZIE">
+ <url-pattern>{OOZIE_ADDRESS}</url-pattern>
+ <properties>
+ <property name="OOZIE_ADDRESS">
+ <component>OOZIE_SERVER</component>
+ <config-property>oozie.base.url</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="WEBHBASE">
+ <url-pattern>http://{HOST}:60080</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>HBASE_MASTER</component>
+ <hostname/>
+ </property>
+ </properties>
+ </service>
+
+ <service name="RESOURCEMANAGER">
+ <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>
+ <properties>
+ <property name="WEBAPP_HTTP_ADDRESS">
+ <component>RESOURCEMANAGER</component>
+ <config-property>yarn.resourcemanager.webapp.address</config-property>
+ </property>
+ <property name="WEBAPP_HTTPS_ADDRESS">
+ <component>RESOURCEMANAGER</component>
+ <config-property>yarn.resourcemanager.webapp.https.address</config-property>
+ </property>
+ <property name="HTTP_POLICY">
+ <component>RESOURCEMANAGER</component>
+ <config-property>yarn.http.policy</config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="HTTP_POLICY" value="HTTPS_ONLY">
+ <then>https</then>
+ <else>http</else>
+ </if>
+ </config-property>
+ </property>
+ <property name="WEBAPP_ADDRESS">
+ <config-property>
+ <if property="HTTP_POLICY" value="HTTPS_ONLY">
+ <then>WEBAPP_HTTPS_ADDRESS</then>
+ <else>WEBAPP_HTTP_ADDRESS</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="HIVE">
+ <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>HIVE_SERVER</component>
+ <hostname/>
+ </property>
+ <property name="USE_SSL">
+ <component>HIVE_SERVER</component>
+ <config-property>hive.server2.use.SSL</config-property>
+ </property>
+ <property name="PATH">
+ <component>HIVE_SERVER</component>
+ <config-property>hive.server2.thrift.http.path</config-property>
+ </property>
+ <property name="PORT">
+ <component>HIVE_SERVER</component>
+ <config-property>hive.server2.thrift.http.port</config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="USE_SSL" value="true">
+ <then>https</then>
+ <else>http</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="DRUID-COORDINATOR">
+ <url-pattern>http://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>DRUID_COORDINATOR</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>DRUID_COORDINATOR</component>
+ <config-property>druid.port</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="DRUID-BROKER">
+ <url-pattern>http://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>DRUID_BROKER</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>DRUID_BROKER</component>
+ <config-property>druid.port</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="DRUID-ROUTER">
+ <url-pattern>http://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>DRUID_ROUTER</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>DRUID_ROUTER</component>
+ <config-property>druid.port</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="DRUID-OVERLORD">
+ <url-pattern>http://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>DRUID_OVERLORD</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>DRUID_OVERLORD</component>
+ <config-property>druid.port</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="SUPERSET">
+ <url-pattern>http://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>DRUID_SUPERSET</component>
+ <hostname/>
+ </property>
+ <property name="PORT">
+ <component>DRUID_SUPERSET</component>
+ <config-property>SUPERSET_WEBSERVER_PORT</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="ATLAS-API">
+ <url-pattern>{REST_ADDRESS}</url-pattern>
+ <properties>
+ <property name="REST_ADDRESS">
+ <component>ATLAS_SERVER</component>
+ <config-property>atlas.rest.address</config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="ATLAS">
+ <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>ATLAS_SERVER</component>
+ <hostname/>
+ </property>
+ <property name="TLS_ENABLED">
+ <component>ATLAS_SERVER</component>
+ <config-property>atlas.enableTLS</config-property>
+ </property>
+ <property name="HTTP_PORT">
+ <component>ATLAS_SERVER</component>
+ <config-property>atlas.server.http.port</config-property>
+ </property>
+ <property name="HTTPS_PORT">
+ <component>ATLAS_SERVER</component>
+ <config-property>atlas.server.https.port</config-property>
+ </property>
+ <property name="PORT">
+ <config-property>
+ <if property="TLS_ENABLED" value="true">
+ <then>HTTPS_PORT</then>
+ <else>HTTP_PORT</else>
+ </if>
+ </config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="TLS_ENABLED" value="true">
+ <then>https</then>
+ <else>http</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="ZEPPELIN">
+ <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>ZEPPELIN_MASTER</component>
+ <hostname/>
+ </property>
+ <property name="SSL">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.ssl</config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>https</then>
+ <else>http</else>
+ </if>
+ </config-property>
+ </property>
+ <property name="HTTPS_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.ssl.port</config-property>
+ </property>
+ <property name="HTTP_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.port</config-property>
+ </property>
+ <property name="PORT">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>HTTPS_PORT</then>
+ <else>HTTP_PORT</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="ZEPPELINUI">
+ <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>ZEPPELIN_MASTER</component>
+ <hostname/>
+ </property>
+ <property name="SSL">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.ssl</config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>https</then>
+ <else>http</else>
+ </if>
+ </config-property>
+ </property>
+ <property name="HTTPS_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.ssl.port</config-property>
+ </property>
+ <property name="HTTP_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.port</config-property>
+ </property>
+ <property name="PORT">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>HTTPS_PORT</then>
+ <else>HTTP_PORT</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+ <service name="ZEPPELINWS">
+ <url-pattern>{SCHEME}://{HOST}:{PORT}</url-pattern>
+ <properties>
+ <property name="HOST">
+ <component>ZEPPELIN_MASTER</component>
+ <hostname/>
+ </property>
+ <property name="SSL">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.ssl</config-property>
+ </property>
+ <property name="HTTPS_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.ssl.port</config-property>
+ </property>
+ <property name="HTTP_PORT">
+ <component>ZEPPELIN_MASTER</component>
+ <config-property>zeppelin.server.port</config-property>
+ </property>
+ <property name="SCHEME">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>wss</then>
+ <else>ws</else>
+ </if>
+ </config-property>
+ </property>
+ <property name="PORT">
+ <config-property>
+ <if property="SSL" value="true">
+ <then>HTTPS_PORT</then>
+ <else>HTTP_PORT</else>
+ </if>
+ </config-property>
+ </property>
+ </properties>
+ </service>
+
+
+<!-- TODO:
+ <service name="YARNUI">
+ </service>
+
+ <service name="RANGER">
+ </service>
+
+ <service name="RANGERUI">
+ </service>
+-->
+
+</service-discovery-url-mappings>
[21/23] knox git commit: KNOX-998 - package name refactoring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyInboundClient.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyInboundClient.java b/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyInboundClient.java
new file mode 100644
index 0000000..c12ee53
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyInboundClient.java
@@ -0,0 +1,107 @@
+package org.apache.knox.gateway.websockets;
+
+import javax.websocket.CloseReason;
+import javax.websocket.Endpoint;
+import javax.websocket.EndpointConfig;
+import javax.websocket.MessageHandler;
+import javax.websocket.Session;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * A Websocket client with callback which is not annotation based.
+ * This handler accepts String and binary messages.
+ * @since 0.14.0
+ */
+public class ProxyInboundClient extends Endpoint {
+
+ /**
+ * Callback to be called once we have events on our socket.
+ */
+ private MessageEventCallback callback;
+
+ protected Session session;
+ protected EndpointConfig config;
+
+
+ public ProxyInboundClient(final MessageEventCallback callback) {
+ super();
+ this.callback = callback;
+ }
+
+ /**
+ * Developers must implement this method to be notified when a new
+ * conversation has just begun.
+ *
+ * @param backendSession the session that has just been activated.
+ * @param config the configuration used to configure this endpoint.
+ */
+ @Override
+ public void onOpen(final javax.websocket.Session backendSession, final EndpointConfig config) {
+ this.session = backendSession;
+ this.config = config;
+
+ /* Set the max message size */
+ session.setMaxBinaryMessageBufferSize(Integer.MAX_VALUE);
+ session.setMaxTextMessageBufferSize(Integer.MAX_VALUE);
+
+ /* Add message handler for binary data */
+ session.addMessageHandler(new MessageHandler.Whole<byte[]>() {
+
+ /**
+ * Called when the message has been fully received.
+ *
+ * @param message the message data.
+ */
+ @Override
+ public void onMessage(final byte[] message) {
+ callback.onMessageBinary(message, true, session);
+ }
+
+ });
+
+ /* Add message handler for text data */
+ session.addMessageHandler(new MessageHandler.Whole<String>() {
+
+ /**
+ * Called when the message has been fully received.
+ *
+ * @param message the message data.
+ */
+ @Override
+ public void onMessage(final String message) {
+ callback.onMessageText(message, session);
+ }
+
+ });
+
+ callback.onConnectionOpen(backendSession);
+ }
+
+ @Override
+ public void onClose(final javax.websocket.Session backendSession, final CloseReason closeReason) {
+ callback.onConnectionClose(closeReason);
+ this.session = null;
+ }
+
+ @Override
+ public void onError(final javax.websocket.Session backendSession, final Throwable cause) {
+ callback.onError(cause);
+ this.session = null;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
deleted file mode 100644
index 69b45dd..0000000
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
+++ /dev/null
@@ -1,374 +0,0 @@
-package org.apache.hadoop.gateway.websockets;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-
-import org.apache.commons.lang.RandomStringUtils;
-import org.eclipse.jetty.server.Handler;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.handler.ContextHandler;
-import org.junit.AfterClass;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-import javax.websocket.CloseReason;
-import javax.websocket.ContainerProvider;
-import javax.websocket.DeploymentException;
-import javax.websocket.Session;
-import javax.websocket.WebSocketContainer;
-import java.io.IOException;
-import java.net.URI;
-import java.nio.ByteBuffer;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import static org.hamcrest.Matchers.instanceOf;
-
-/**
- * Test {@link ProxyInboundClient} class.
- * @since 0.14.0
- */
-public class ProxyInboundClientTest {
-
- private static Server server;
- private static URI serverUri;
- private static Handler handler;
-
- String recievedMessage = null;
-
- byte[] recievedBinaryMessage = null;
-
-
- /* create an instance */
- public ProxyInboundClientTest() {
- super();
- }
-
- @BeforeClass
- public static void startWSServer() throws Exception
- {
- server = new Server();
- ServerConnector connector = new ServerConnector(server);
- server.addConnector(connector);
-
- handler = new WebsocketEchoHandler();
-
- ContextHandler context = new ContextHandler();
- context.setContextPath("/");
- context.setHandler(handler);
- server.setHandler(context);
-
- server.start();
-
- String host = connector.getHost();
- if (host == null)
- {
- host = "localhost";
- }
- int port = connector.getLocalPort();
- serverUri = new URI(String.format("ws://%s:%d/",host,port));
- }
-
- @AfterClass
- public static void stopServer()
- {
- try
- {
- server.stop();
- }
- catch (Exception e)
- {
- e.printStackTrace(System.err);
- }
- }
-
- //@Test(timeout = 3000)
- @Test
- public void testClientInstance() throws IOException, DeploymentException {
-
- final String textMessage = "Echo";
- final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
-
- final AtomicBoolean isTestComplete = new AtomicBoolean(false);
-
- final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
- final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
-
- /**
- * A generic callback, can be left un-implemented
- *
- * @param message
- */
- @Override
- public void doCallback(String message) {
-
- }
-
- /**
- * Callback when connection is established.
- *
- * @param session
- */
- @Override
- public void onConnectionOpen(Object session) {
-
- }
-
- /**
- * Callback when connection is closed.
- *
- * @param reason
- */
- @Override
- public void onConnectionClose(CloseReason reason) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when there is an error in connection.
- *
- * @param cause
- */
- @Override
- public void onError(Throwable cause) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a text message is received.
- *
- * @param message
- * @param session
- */
- @Override
- public void onMessageText(String message, Object session) {
- recievedMessage = message;
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a binary message is received.
- *
- * @param message
- * @param last
- * @param session
- */
- @Override
- public void onMessageBinary(byte[] message, boolean last,
- Object session) {
-
- }
- } );
-
- Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
-
- Session session = container.connectToServer(client, serverUri);
-
- session.getBasicRemote().sendText(textMessage);
-
- while(!isTestComplete.get()) {
- /* just wait for the test to finish */
- }
-
- Assert.assertEquals("The received text message is not the same as the sent", textMessage, recievedMessage);
- }
-
- @Test(timeout = 3000)
- public void testBinarymessage() throws IOException, DeploymentException {
-
- final String textMessage = "Echo";
- final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
-
- final AtomicBoolean isTestComplete = new AtomicBoolean(false);
-
- final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
- final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
-
- /**
- * A generic callback, can be left un-implemented
- *
- * @param message
- */
- @Override
- public void doCallback(String message) {
-
- }
-
- /**
- * Callback when connection is established.
- *
- * @param session
- */
- @Override
- public void onConnectionOpen(Object session) {
-
- }
-
- /**
- * Callback when connection is closed.
- *
- * @param reason
- */
- @Override
- public void onConnectionClose(CloseReason reason) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when there is an error in connection.
- *
- * @param cause
- */
- @Override
- public void onError(Throwable cause) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a text message is received.
- *
- * @param message
- * @param session
- */
- @Override
- public void onMessageText(String message, Object session) {
- recievedMessage = message;
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a binary message is received.
- *
- * @param message
- * @param last
- * @param session
- */
- @Override
- public void onMessageBinary(byte[] message, boolean last,
- Object session) {
- recievedBinaryMessage = message;
- isTestComplete.set(true);
- }
- } );
-
- Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
-
- Session session = container.connectToServer(client, serverUri);
-
- session.getBasicRemote().sendBinary(binarymessage);
-
- while(!isTestComplete.get()) {
- /* just wait for the test to finish */
- }
-
- Assert.assertEquals("Binary message does not match", textMessage, new String(recievedBinaryMessage));
- }
-
- @Test(timeout = 3000)
- public void testTextMaxBufferLimit() throws IOException, DeploymentException {
-
- final String longMessage = RandomStringUtils.random(100000);
-
- final AtomicBoolean isTestComplete = new AtomicBoolean(false);
-
- final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
- final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
-
- /**
- * A generic callback, can be left un-implemented
- *
- * @param message
- */
- @Override
- public void doCallback(String message) {
-
- }
-
- /**
- * Callback when connection is established.
- *
- * @param session
- */
- @Override
- public void onConnectionOpen(Object session) {
-
- }
-
- /**
- * Callback when connection is closed.
- *
- * @param reason
- */
- @Override
- public void onConnectionClose(CloseReason reason) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when there is an error in connection.
- *
- * @param cause
- */
- @Override
- public void onError(Throwable cause) {
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a text message is received.
- *
- * @param message
- * @param session
- */
- @Override
- public void onMessageText(String message, Object session) {
- recievedMessage = message;
- isTestComplete.set(true);
- }
-
- /**
- * Callback when a binary message is received.
- *
- * @param message
- * @param last
- * @param session
- */
- @Override
- public void onMessageBinary(byte[] message, boolean last,
- Object session) {
-
- }
- } );
-
- Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
-
- Session session = container.connectToServer(client, serverUri);
-
- session.getBasicRemote().sendText(longMessage);
-
- while(!isTestComplete.get()) {
- /* just wait for the test to finish */
- }
-
- Assert.assertEquals(longMessage, recievedMessage);
-
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-server/src/test/java/org/apache/knox/gateway/websockets/ProxyInboundClientTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/knox/gateway/websockets/ProxyInboundClientTest.java b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/ProxyInboundClientTest.java
new file mode 100644
index 0000000..f8dd167
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/websockets/ProxyInboundClientTest.java
@@ -0,0 +1,374 @@
+package org.apache.knox.gateway.websockets;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.ContextHandler;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import javax.websocket.CloseReason;
+import javax.websocket.ContainerProvider;
+import javax.websocket.DeploymentException;
+import javax.websocket.Session;
+import javax.websocket.WebSocketContainer;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+/**
+ * Test {@link ProxyInboundClient} class.
+ * @since 0.14.0
+ */
+public class ProxyInboundClientTest {
+
+ private static Server server;
+ private static URI serverUri;
+ private static Handler handler;
+
+ String recievedMessage = null;
+
+ byte[] recievedBinaryMessage = null;
+
+
+ /* create an instance */
+ public ProxyInboundClientTest() {
+ super();
+ }
+
+ @BeforeClass
+ public static void startWSServer() throws Exception
+ {
+ server = new Server();
+ ServerConnector connector = new ServerConnector(server);
+ server.addConnector(connector);
+
+ handler = new WebsocketEchoHandler();
+
+ ContextHandler context = new ContextHandler();
+ context.setContextPath("/");
+ context.setHandler(handler);
+ server.setHandler(context);
+
+ server.start();
+
+ String host = connector.getHost();
+ if (host == null)
+ {
+ host = "localhost";
+ }
+ int port = connector.getLocalPort();
+ serverUri = new URI(String.format("ws://%s:%d/",host,port));
+ }
+
+ @AfterClass
+ public static void stopServer()
+ {
+ try
+ {
+ server.stop();
+ }
+ catch (Exception e)
+ {
+ e.printStackTrace(System.err);
+ }
+ }
+
+ //@Test(timeout = 3000)
+ @Test
+ public void testClientInstance() throws IOException, DeploymentException {
+
+ final String textMessage = "Echo";
+ final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendText(textMessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals("The received text message is not the same as the sent", textMessage, recievedMessage);
+ }
+
+ @Test(timeout = 3000)
+ public void testBinarymessage() throws IOException, DeploymentException {
+
+ final String textMessage = "Echo";
+ final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+ recievedBinaryMessage = message;
+ isTestComplete.set(true);
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendBinary(binarymessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals("Binary message does not match", textMessage, new String(recievedBinaryMessage));
+ }
+
+ @Test(timeout = 3000)
+ public void testTextMaxBufferLimit() throws IOException, DeploymentException {
+
+ final String longMessage = RandomStringUtils.random(100000);
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendText(longMessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals(longMessage, recievedMessage);
+
+ }
+
+
+
+}
[04/23] knox git commit: KNOX-998 - Minor fixes
Posted by mo...@apache.org.
KNOX-998 - Minor fixes
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/557d569f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/557d569f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/557d569f
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 557d569faef9b9e894ac380f8488cd03eb52823d
Parents: cdac6ff
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 2 10:43:09 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 2 10:43:09 2017 -0400
----------------------------------------------------------------------
gateway-adapter/pom.xml | 2 +-
.../hadoop/gateway/audit/log4j/layout/AuditLayout.java | 2 +-
.../hadoop/gateway/shirorealm/KnoxLdapContextFactory.java | 2 +-
.../org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java | 2 +-
.../org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java | 8 +++++++-
5 files changed, 11 insertions(+), 5 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/557d569f/gateway-adapter/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-adapter/pom.xml b/gateway-adapter/pom.xml
index 0f1ae4c..4bb62e3 100644
--- a/gateway-adapter/pom.xml
+++ b/gateway-adapter/pom.xml
@@ -27,7 +27,7 @@
</parent>
<artifactId>gateway-adapter</artifactId>
<name>gateway-adapter</name>
- <description>Adapter/Shims for old package structure to ensure compatibility.</description>
+ <description>Adapter for old package structure to ensure compatibility.</description>
<licenses>
<license>
http://git-wip-us.apache.org/repos/asf/knox/blob/557d569f/gateway-adapter/src/main/java/org/apache/hadoop/gateway/audit/log4j/layout/AuditLayout.java
----------------------------------------------------------------------
diff --git a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/audit/log4j/layout/AuditLayout.java b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/audit/log4j/layout/AuditLayout.java
index 303618f..612026f 100644
--- a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/audit/log4j/layout/AuditLayout.java
+++ b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/audit/log4j/layout/AuditLayout.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.gateway.audit.log4j.layout;
import org.apache.log4j.spi.LoggingEvent;
/**
- * An adapter class that deligates calls to {@link org.apache.knox.gateway.audit.log4j.layout.AuditLayout}
+ * An adapter class that delegate calls to {@link org.apache.knox.gateway.audit.log4j.layout.AuditLayout}
* for backwards compatability with package structure.
*
* @since 0.14.0
http://git-wip-us.apache.org/repos/asf/knox/blob/557d569f/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapContextFactory.java
----------------------------------------------------------------------
diff --git a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapContextFactory.java b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapContextFactory.java
index e06cd9a..acc726f 100644
--- a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapContextFactory.java
+++ b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapContextFactory.java
@@ -22,7 +22,7 @@ import javax.naming.ldap.LdapContext;
import java.util.Hashtable;
/**
- * An adapter class that deligates calls to {@link org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory}
+ * An adapter class that delegate calls to {@link org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory}
* for backwards compatability with package structure.
* @since 0.14.0
*/
http://git-wip-us.apache.org/repos/asf/knox/blob/557d569f/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
----------------------------------------------------------------------
diff --git a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
index 17dffdd..75204b0 100644
--- a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
+++ b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealm.java
@@ -29,7 +29,7 @@ import java.util.Map;
*/
/**
- * An adapter class that deligates calls to {@link org.apache.knox.gateway.shirorealm.KnoxLdapRealm}
+ * An adapter class that delegate calls to {@link org.apache.knox.gateway.shirorealm.KnoxLdapRealm}
* for backwards compatability with package structure.
* @since 0.14.0
*/
http://git-wip-us.apache.org/repos/asf/knox/blob/557d569f/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java
----------------------------------------------------------------------
diff --git a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java
index fd475d5..245d9d5 100644
--- a/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java
+++ b/gateway-adapter/src/main/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealm.java
@@ -24,7 +24,7 @@ import org.apache.shiro.subject.PrincipalCollection;
*/
/**
- * An adapter class that deligates calls to {@link org.apache.knox.gateway.shirorealm.KnoxPamRealm}
+ * An adapter class that delegate calls to {@link org.apache.knox.gateway.shirorealm.KnoxPamRealm}
* for backwards compatability with package structure.
* @since 0.14.0
*/
@@ -44,4 +44,10 @@ public class KnoxPamRealm
AuthenticationToken token) throws AuthenticationException {
return super.doGetAuthenticationInfo(token);
}
+
+ @Override
+ protected AuthorizationInfo doGetAuthorizationInfo(
+ PrincipalCollection principals) {
+ return super.doGetAuthorizationInfo(principals);
+ }
}
[15/23] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
----------------------------------------------------------------------
diff --cc gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
index 8c5f21a,0000000..70085d4
mode 100644,000000..100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/knox/gateway/util/urltemplate/ParserTest.java
@@@ -1,1305 -1,0 +1,1322 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.util.urltemplate;
+
+import org.apache.hadoop.test.category.FastTests;
+import org.apache.hadoop.test.category.UnitTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.net.URISyntaxException;
+import java.util.Iterator;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.junit.Assert.assertThat;
+
+@Category( { UnitTests.class, FastTests.class } )
+public class ParserTest {
+
+ private void assertBasics(
+ Template template,
+ boolean isAbsolute,
+ boolean isDirectory,
+ boolean hasQuery,
+ int pathSegmentsSize,
+ int querySegmentsSize ) {
+ assertThat( "Incorrect isAbsolute value.", template.isAbsolute(), is( isAbsolute ) );
+ assertThat( "Incorrect isDirectory value.", template.isDirectory(), is( isDirectory ) );
+ assertThat( "Incorrect hasQuery value.", template.hasQuery(), is( hasQuery ) );
+ assertThat( "Incorrect path size.", template.getPath().size(), is( pathSegmentsSize ) );
+ assertThat( "Incorrect query size.", template.getQuery().size(), is( querySegmentsSize ) );
+ }
+
+ public void assertPath(
+ Template template,
+ int index,
+ String paramName,
+ String valuePattern ) {
+ Path segment = template.getPath().get( index );
+ assertThat( "Incorrect template queryParam name.", segment.getParamName(), is( paramName ) );
+ assertThat( "Incorrect template value pattern.", segment.getFirstValue().getToken().getEffectivePattern(), is( valuePattern ) );
+ }
+
+ public void assertPath(
+ Template template,
+ int index,
+ String paramName,
+ String valuePattern,
+ int type,
+ int minRequired,
+ int maxAllowed ) {
+ Path segment = template.getPath().get( index );
+ assertThat( "Param name wrong.", segment.getParamName(), is( paramName ) );
+ assertThat( "Value pattern wrong.", segment.getFirstValue().getEffectivePattern(), is( valuePattern ) );
+ assertThat( "Segment type wrong.", segment.getFirstValue().getType(), is( type ) );
+// assertThat( "Segment min required wrong.", segment.getMinRequired(), is( minRequired ) );
+// assertThat( "Segment max allowed wrong.", segment.getMaxAllowed(), is( maxAllowed ) );
+ }
+
+ public void assertQuery(
+ Template template,
+ String queryName,
+ String paramName,
+ String valuePattern ) {
+ Query segment = template.getQuery().get( queryName );
+ assertThat( "Query name wrong.", segment.getQueryName(), is( queryName ));
+ assertThat( "Param name wrong.", segment.getParamName(), is( paramName ));
+ assertThat( "value pattern wrong.", segment.getFirstValue().getToken().getEffectivePattern(), is( valuePattern ) );
+ }
+
+ public void assertQuery(
+ Template template,
+ String queryName,
+ String paramName,
+ String valuePattern,
+ int type,
+ int minRequired,
+ int maxAllowed ) {
+ Query segment = template.getQuery().get( queryName );
+ assertThat( "Query name wrong.", segment.getQueryName(), is( queryName ));
+ assertThat( "Param name wrong.", segment.getParamName(), is( paramName ));
+ assertThat( "value pattern wrong.", segment.getFirstValue().getEffectivePattern(), is( valuePattern ));
+ assertThat( "Segment type wrong.", segment.getFirstValue().getType(), is( type ) );
+// assertThat( "Segment min required wrong.", segment.getMinRequired(), is( minRequired ) );
+// assertThat( "Segment max allowed wrong.", segment.getMaxAllowed(), is( maxAllowed ) );
+ }
+
+ @Test
+ public void testCompleteUrl() throws URISyntaxException {
+ String text;
+ Template template;
+ Parser parser = new Parser();
+
+ text = "foo://username:password@example.com:8042/over/there/index.dtb?type=animal&name=narwhal#nose";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, false, true, 3, 2 );
+ assertThat( template.toString(), is( text ) );
+ }
+
+// @Test
+// public void testInvalidPatterns() {
+// //TODO: ? in wrong spot.
+// //TODO: & in wrong spots.
+// }
+
+// @Ignore( "TODO" )
+// @Test
+// public void testRegexPatterns() {
+// }
+
+ @Test
+ public void testTemplates() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "{path}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "**" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{pathA}/{pathB}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 2, 0 );
+ assertPath( template, 0, "pathA", "**" );
+ assertPath( template, 1, "pathB", "**" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?paramA={valueA}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "paramA", "valueA", "**" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?paramA={valueA}&paramB={valueB}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 2 );
+ assertQuery( template, "paramA", "valueA", "**" );
+ assertQuery( template, "paramB", "valueB", "**" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?paramA={valueA}?paramB={valueB}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 2 );
+ assertQuery( template, "paramA", "valueA", "**" );
+ assertQuery( template, "paramB", "valueB", "**" );
+ //assertThat( template.toString(), is( text ) );
+
+ text = "{pathA}?paramA={valueA}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 1, 1 );
+ assertPath( template, 0, "pathA", "**" );
+ assertQuery( template, "paramA", "valueA", "**" );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ @Test
+ public void testStaticPatterns() throws Exception {
+ Parser parser = new Parser();
+ String text;
+ Template template;
+
+ text = "";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 0, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "#";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "path";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "", "path" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/path";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 1, 0 );
+ assertPath( template, 0, "", "path" );
+ assertThat( template.toString(), is( text ) );
+
+// text = "//path";
+// template = parser.parseTemplate( text );
+// assertBasics( template, true, false, false, 1, 0 );
+// assertPath( template, 0, "", "path" );
+
+ text = "path/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, true, false, 1, 0 );
+ assertPath( template, 0, "", "path" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "path//";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, true, false, 1, 0 );
+ assertPath( template, 0, "", "path" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "/path/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 1, 0 );
+ assertPath( template, 0, "", "path" );
+ assertThat( template.toString(), is( text ) );
+
+// text = "//path//";
+// template = parser.parseTemplate( text );
+// assertBasics( template, true, true, false, 1, 0 );
+// assertPath( template, 0, "", "path" );
+
+ text = "pathA/pathB";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "pathA//pathB";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "/pathA/pathB";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/pathA//pathB";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "pathA/pathB/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, true, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "pathA//pathB/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, true, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "/pathA/pathB/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/pathA//pathB/";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 2, 0 );
+ assertPath( template, 0, "", "pathA" );
+ assertPath( template, 1, "", "pathB" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "/?";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, true, 0, 0 );
+ assertThat( template.toString(), is( text ) );
+
+// text = "//??";
+// template = parser.parseTemplate( text );
+// assertBasics( template, true, true, true, 0, 0 );
+
+ text = "?name=value";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "name", "", "value" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?name1=value1&name2=value2";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 2 );
+ assertQuery( template, "name1", "", "value1" );
+ assertQuery( template, "name2", "", "value2" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?name1=value1&&name2=value2";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 2 );
+ assertQuery( template, "name1", "", "value1" );
+ assertQuery( template, "name2", "", "value2" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "/?name=value";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, true, 0, 1 );
+ assertQuery( template, "name", "", "value" );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/?name1=value1&name2=value2";
+ template = parser.parseTemplate( text );
+ assertBasics( template, true, true, true, 0, 2 );
+ assertQuery( template, "name1", "", "value1" );
+ assertQuery( template, "name2", "", "value2" );
+ assertThat( template.toString(), is( text ) );
+ }
+
++ /**
++ * KNOX-1055
++ * In some cases & could be encoded as &amp;
++ */
++ @Test
++ public void testEncodedChar() throws URISyntaxException {
++ Parser parser = new Parser();
++ String text;
++ Template template;
++
++ text = "stage?id=007&amp;attempt=0";
++ template = parser.parseTemplate( text );
++ assertBasics( template, false, false, true, 1, 2 );
++ assertQuery( template, "id", "", "007" );
++ assertQuery( template, "attempt", "", "0" );
++ }
++
+ @Test
+ public void testParameterizedPathTemplatesWithWildcardAndRegex() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "{path}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "**", Segment.GLOB, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{path=static}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "static", Segment.STATIC, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{path=*}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "*", Segment.STAR, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{path=**}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "**", Segment.GLOB, 0, Integer.MAX_VALUE );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{path=wild*card}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertPath( template, 0, "path", "wild*card", Segment.REGEX, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ @Test
+ public void testParameterizedQueryTemplatesWithWildcardAndRegex() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "?query={queryParam}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "query", "queryParam", "**", Segment.GLOB, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?query={queryParam=static}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "query", "queryParam", "static", Segment.STATIC, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?query={queryParam=*}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "query", "queryParam", "*", Segment.STAR, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?query={queryParam=**}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "query", "queryParam", "**", Segment.GLOB, 0, Integer.MAX_VALUE );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?query={queryParam=wild*card}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "query", "queryParam", "wild*card", Segment.REGEX, 1, 1 );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ @Test
+ public void testGlobPattern() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "**";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/**";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 1, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "**/";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, true, false, 1, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/**/";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 1, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "/**/path";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 2, 0 );
+ assertThat( template.toString(), is( text ) );
+ }
+
+// @Ignore( "TODO" )
+// @Test
+// public void testPatternsWithSchemeAndAuthority() throws URISyntaxException {
+// String text;
+// Template template;
+//
+// text = "http:";
+// template = Parser.parse( text );
+//
+// text = "http:/path";
+// template = Parser.parse( text );
+//
+// text = "http://host";
+// template = Parser.parse( text );
+//
+// text = "http://host/";
+// template = Parser.parse( text );
+//
+// text = "http://host:80";
+// template = Parser.parse( text );
+//
+// text = "http://host:80/";
+// template = Parser.parse( text );
+//
+//
+// text = "{scheme}:";
+// template = Parser.parse( text );
+//
+// text = "{scheme}:/{path}";
+// template = Parser.parse( text );
+//
+// text = "{scheme}://{host}";
+// template = Parser.parse( text );
+//
+// text = "{scheme}://{host}/";
+// template = Parser.parse( text );
+//
+// text = "{scheme}://{host}:{port}";
+// template = Parser.parse( text );
+//
+// text = "{scheme}://{host}:{port}/";
+// template = Parser.parse( text );
+//
+//
+// text = "{scheme=http}:/{path=index.html}";
+// template = Parser.parse( text );
+//
+// text = "{scheme=http}://{host=*.com}";
+// template = Parser.parse( text );
+//
+// text = "{scheme=https}://{host=*.edu}/";
+// template = Parser.parse( text );
+//
+// text = "{scheme=rmi}://{host=*}:{port=80}";
+// template = Parser.parse( text );
+//
+// text = "{scheme=ftp}://{host=localhost*}:{port=*80}/";
+// template = Parser.parse( text );
+// }
+
+ @Test
+ public void testAuthority() throws URISyntaxException {
+ String text;
+ Template template;
+ String image;
+
+ text = "//";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//:@:";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//host";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "host" ) );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//@host";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "host" ) );
+ assertThat( template.getPort(), nullValue() );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//@:80";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), is( "80" ) );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "80" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//username@";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername().getFirstValue().getOriginalPattern(), is( "username" ) );
+ assertThat( template.getUsername().getFirstValue().getEffectivePattern(), is( "username" ) );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//:password@";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword().getFirstValue().getOriginalPattern(), is( "password" ) );
+ assertThat( template.getPassword().getFirstValue().getEffectivePattern(), is( "password" ) );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//{host}:{port}";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost().getParamName(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getParamName(), is( "port" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "*" ) );
+ image = template.toString();
+ assertThat( image, is( "//{host}:{port}" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{host}:{port}";
+ template = Parser.parseTemplate( text );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost().getParamName(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getParamName(), is( "port" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "*" ) );
+ image = template.toString();
+ assertThat( image, is( "{host}:{port}" ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ @Test
+ public void testQuery() throws URISyntaxException {
+ String text;
+ Template template;
+ Query query;
+ Iterator<Segment.Value> values;
+ Segment.Value value;
+
+ text = "?queryName";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ query = template.getQuery().get( "queryName" );
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "queryName" ) );
+ assertThat( query.getParamName(), is( "" ) );
+ assertThat( query.getFirstValue().getEffectivePattern(), nullValue() ); //is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?query=value1&query=value2";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ query = template.getQuery().get( "query" );
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "query" ) );
+ assertThat( query.getParamName(), is( "" ) );
+ values = query.getValues().iterator();
+ value = values.next();
+ assertThat( value.getOriginalPattern(), is( "value1" ) );
+ assertThat( value.getEffectivePattern(), is( "value1" ) );
+ value = values.next();
+ assertThat( value.getOriginalPattern(), is( "value2" ) );
+ assertThat( value.getEffectivePattern(), is( "value2" ) );
+ assertThat( values.hasNext(), is( false ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ @Test
+ public void testFragment() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "#fragment";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment().getFirstValue().getEffectivePattern(), is( "fragment" ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Exercises degenerate URL-template inputs (bare separators and empty
+ // components) to pin down how the parser classifies each one.  Lines marked
+ // //IMPROVE record cases where toString() does not yet round-trip and the
+ // weaker getPattern() check is used instead.
+ @Test
+ public void testEdgeCases() throws URISyntaxException {
+ Parser parser = new Parser();
+ String text;
+ Template template;
+
+ // Bare separators: "//" is an empty authority, "??" an empty query,
+ // "##" a fragment whose value is a literal "#".
+ text = "//";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "??";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "##";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment().getFirstValue().getEffectivePattern(), is( "#" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "??name=value";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ assertQuery( template, "name", "", "value" );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ // Empty authority followed by an empty query or fragment: all authority
+ // sub-components (user, password, host, port) stay null.
+ text = "//?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ // A lone ":" is NOT treated as a scheme separator; it becomes a one-segment
+ // literal path.
+ text = ":";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertThat( template.hasScheme(), is( false ) );
+ assertThat( template.getScheme(), nullValue() );
+ assertThat( template.hasAuthority(), is( false ) );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), is( ":" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( ":" ) );
+ assertThat( template.toString(), is( ":" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = ":?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 1, 0 );
+ assertThat( template.hasScheme(), is( false ) );
+ assertThat( template.getScheme(), nullValue() );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), is( ":" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( ":" ) );
+ assertThat( template.hasQuery(), is( true ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = ":#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertThat( template.hasScheme(), is( false ) );
+ assertThat( template.getScheme(), nullValue() );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), is( ":" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( ":" ) );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ // With a real scheme present, a trailing "?" or "#" yields an empty
+ // query/fragment (hasFragment() true but getFragment() null).
+ text = "http:?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), is( "http" ) );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "http" ) );
+ assertThat( template.hasQuery(), is( true ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "http:#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), is( "http" ) );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "http" ) );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "scheme:path?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 1, 0 );
+ assertThat( template.toString(), is( text ) );
+
+ text = "scheme:path#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 1, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ // Host with trailing separators.
+ text = "//host/";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "host" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//host?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "host" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//host#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "host" ) );
+ assertThat( template.toString(), is( text ) );
+
+ // Empty authority plus empty host/port (":") combinations.
+ text = "///";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//:";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "//:/";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 0, 0 );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getHost(), nullValue() );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//:?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ assertThat( template.getHost(), nullValue() );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "//:#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, false, 0, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getHost(), nullValue() );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "///#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, false, 0, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "///path#";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, false, false, 1, 0 );
+ assertThat( template.hasFragment(), is( true ) );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "///?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, true, true, 0, 0 );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "///path?";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, true, false, true, 1, 0 );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getFragment(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Covers the "extra" query remainder forms ("?*", "?**", "?{*}", "?{**}" and
+ // the name={param} variants), checking queryName/paramName extraction and the
+ // resulting original vs. effective value patterns.
+ @Test
+ public void testQueryRemainder() throws URISyntaxException {
+ String text;
+ Template template;
+ Query query;
+
+ text = "?*";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "*" ) );
+ assertThat( query.getParamName(), is( "" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), nullValue() ); //is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?**";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "**" ) );
+ assertThat( query.getParamName(), is( "" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), nullValue() ); //is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+
+ // Braced forms bind a parameter name; effective pattern defaults to "**".
+ text = "?{*}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "*" ) );
+ assertThat( query.getParamName(), is( "*" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "?{**}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "**" ) );
+ assertThat( query.getParamName(), is( "**" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ assertThat( template.toString(), is( text ) );
+
+ // name={param} forms: toString() does not round-trip yet (//IMPROVE), so
+ // the weaker getPattern() check is used.
+ text = "?*={*}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "*" ) );
+ assertThat( query.getParamName(), is( "*" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "?**={**}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "**" ) );
+ assertThat( query.getParamName(), is( "**" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ text = "?**={**=**}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 0 );
+ query = template.getExtra();
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "**" ) );
+ assertThat( query.getParamName(), is( "**" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), is( "**" ) );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+ }
+
+ // Verifies the shorthand query syntax "?{name}" / "?{name=value}", where the
+ // brace form supplies both the query name and the parameter name in one token.
+ @Test
+ public void testSimplifiedQuerySyntax() throws URISyntaxException {
+ String text;
+ Template template;
+ Query query;
+
+ // No explicit value: effective pattern defaults to "**" (match anything).
+ text = "?{queryParam}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ query = template.getQuery().get( "queryParam" );
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "queryParam" ) );
+ assertThat( query.getParamName(), is( "queryParam" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "**" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ // Explicit value: both original and effective patterns are the literal.
+ text = "?{queryParam=value}";
+ template = Parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 0, 1 );
+ query = template.getQuery().get( "queryParam" );
+ assertThat( query, notNullValue() );
+ assertThat( query.getQueryName(), is( "queryParam" ) );
+ assertThat( query.getParamName(), is( "queryParam" ) );
+ assertThat( query.getFirstValue().getOriginalPattern(), is( "value" ) );
+ assertThat( query.getFirstValue().getEffectivePattern(), is( "value" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+ }
+
+ // Smoke-tests fully wildcarded templates (scheme, host, port, path, query all
+ // "*" / "**") and confirms each parses and round-trips through toString().
+ @Test
+ public void testAllWildcardUseCases() throws URISyntaxException {
+ String text;
+ Template template;
+
+ text = "*://*:*/**?**";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "*://*:*/**/path?{**}";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "*://*:*/**/webhdfs/v1/?{**}";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Verifies query parameters that have a name but no value ("?X", "?X=",
+ // long opaque tokens with/without a trailing "="), both in parsing and in
+ // expansion back to a string via Expander.
+ @Test
+ public void testQueryNameWithoutValue() throws URISyntaxException {
+ Parser parser = new Parser();
+ String text;
+ Template template;
+ String string;
+ Expander expander = new Expander();
+
+ // "?X" (no "="): param is kept and the template round-trips exactly.
+ text = "*://*:*/**?X";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getParamName(), is( "" ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getHost().getParamName(), is( "" ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getParamName(), is( "" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPath().size(), is( 1 ) );
+ assertThat( template.getPath().get( 0 ).getParamName(), is( "" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), is( "**" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( "**" ) );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template, notNullValue() );
+ assertThat( template.getQuery().get( "X" ), notNullValue() );
+ string = expander.expandToString( template, null, null );
+ assertThat( string, is( text ) );
+ assertThat( template.toString(), is( text ) );
+
+ // "?X=" (empty value): expansion drops the trailing "=", so the expanded
+ // string differs from the input and only getPattern() round-trips.
+ text = "*://*:*/**?X=";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.getQuery().get( "X" ), notNullValue() );
+ string = expander.expandToString( template, null, null );
+ assertThat( string, is( "*://*:*/**?X" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+
+ // Same behavior with a realistic long opaque query token (no "=").
+ text = "http://localhost:62142/gateway/cluster/webhdfs/data/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir/file?aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.getQuery().get( "aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM" ), notNullValue() );
+ string = expander.expandToString( template, null, null );
+ assertThat( string, is( "http://localhost:62142/gateway/cluster/webhdfs/data/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir/file?aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM" ) );
+ assertThat( template.toString(), is( text ) );
+
+ // Same token with a trailing "=": again dropped on expansion.
+ text = "http://localhost:62142/gateway/cluster/webhdfs/data/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir/file?aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM=";
+ template = Parser.parseTemplate( text );
+ assertThat( template, notNullValue() );
+ assertThat( template.getQuery().get( "aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM" ), notNullValue() );
+ string = expander.expandToString( template, null, null );
+ assertThat( string, is( "http://localhost:62142/gateway/cluster/webhdfs/data/v1/tmp/GatewayWebHdfsFuncTest/testBasicHdfsUseCase/dir/file?aG9zdD1sb2NhbGhvc3QmcG9ydD02MjEzOSZvcD1DUkVBVEUmdXNlci5uYW1lPWhkZnM" ) );
+ //IMPROVE assertThat( template.toString(), is( text ) );
+ assertThat( template.getPattern(), is( text ) );
+ }
+
+ // Verifies templates that contain only an authority ("host:port"), in both
+ // literal and parameterized ("{host}:{port}") forms.
+ @Test
+ public void testTemplateWithOnlyAuthority() throws Exception {
+ String text;
+ Template template;
+ Parser parser = new Parser();
+
+ text = "test-host:42";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( false ) );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "test-host" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "test-host" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), is( "42" ) );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "42" ) );
+ assertThat( template.toString(), is( text ) );
+
+ // Parameterized: no original pattern; effective pattern defaults to "*".
+ text = "{test-host}:{test-port}";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( false ) );
+ assertThat( template.getHost().getParamName(), is( "test-host" ) );
+ assertThat( template.getHost().getFirstValue().getToken().getOriginalPattern(), nullValue() );
+ assertThat( template.getHost().getFirstValue().getToken().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getParamName(), is( "test-port" ) );
+ assertThat( template.getHost().getFirstValue().getToken().getOriginalPattern(), nullValue() );
+ assertThat( template.getPort().getFirstValue().getToken().getEffectivePattern(), is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Verifies templates with a scheme and path but no authority, contrasting
+ // "scheme:/path" (no authority) with "scheme:///path" (empty authority),
+ // in both literal and parameterized forms.
+ @Test
+ public void testTemplateWithoutAuthority() throws Exception {
+ String text;
+ Template template;
+ Parser parser = new Parser();
+
+ text = "test-scheme:/test-path";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), is( "test-scheme" ) );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "test-scheme" ) );
+ assertThat( template.hasAuthority(), is( false ) );
+ assertThat( template.getPath().size(), is( 1 ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), is( "test-path" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( "test-path" ) );
+ assertThat( template.hasQuery(), is( false ) );
+ assertThat( template.toString(), is( text ) );
+
+ // "///" means an authority is present but all its components are null.
+ text = "test-scheme:///test-path";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), is( "test-scheme" ) );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "test-scheme" ) );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.getPath().size(), is( 1 ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( "test-path" ) );
+ assertThat( template.hasQuery(), is( false ) );
+ assertThat( template.toString(), is( text ) );
+
+ // Parameterized scheme/path: effective patterns default to "*" and "**".
+ text = "{test-scheme}:/{test-path}";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getParamName(), is( "test-scheme" ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.hasAuthority(), is( false ) );
+ assertThat( template.getPath().size(), is( 1 ) );
+ assertThat( template.getPath().get( 0 ).getParamName(), is( "test-path" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( "**" ) );
+ assertThat( template.hasQuery(), is( false ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "{test-scheme}:///{test-path}";
+ template = parser.parseTemplate( text );
+ assertThat( template.hasScheme(), is( true ) );
+ assertThat( template.getScheme().getParamName(), is( "test-scheme" ) );
+ assertThat( template.getScheme().getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getScheme().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.hasAuthority(), is( true ) );
+ assertThat( template.getUsername(), nullValue() );
+ assertThat( template.getPassword(), nullValue() );
+ assertThat( template.getHost(), nullValue() );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.getPath().size(), is( 1 ) );
+ assertThat( template.getPath().get( 0 ).getParamName(), is( "test-path" ) );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getOriginalPattern(), nullValue() );
+ assertThat( template.getPath().get( 0 ).getFirstValue().getEffectivePattern(), is( "**" ) );
+ assertThat( template.hasQuery(), is( false ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Verifies wildcard handling in the authority: "**" in host/port keeps its
+ // original pattern but normalizes to an effective pattern of "*", and a host
+ // without an explicit port leaves getPort() null.
+ @Test
+ public void testAuthorityWildcards() throws Exception {
+ String text;
+ Template template;
+
+ text = "*://*:*/";
+ template = Parser.parseTemplate( text );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+
+ text = "*://**/";
+ template = Parser.parseTemplate( text );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "**" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "*://*/";
+ template = Parser.parseTemplate( text );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "*" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort(), nullValue() );
+ assertThat( template.toString(), is( text ) );
+
+ text = "*://**:**/";
+ template = Parser.parseTemplate( text );
+ assertThat( template.getHost().getFirstValue().getOriginalPattern(), is( "**" ) );
+ assertThat( template.getHost().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.getPort().getFirstValue().getOriginalPattern(), is( "**" ) );
+ assertThat( template.getPort().getFirstValue().getEffectivePattern(), is( "*" ) );
+ assertThat( template.toString(), is( text ) );
+ }
+
+ // Unit-tests Parser.parseTemplateToken directly: malformed or unbraced inputs
+ // ("{", "}", "{X", "X}", plain literals) get an empty parameter name and are
+ // passed through verbatim as both original and effective patterns.
+ @Test
+ public void testParseTemplateToken() {
+ Builder builder;
+ String input;
+ Token output;
+
+ builder = new Builder( "" );
+
+ input = "{";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "{" ) );
+ assertThat( output.getEffectivePattern(), is( "{" ) );
+
+ input = "}";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "}" ) );
+ assertThat( output.getEffectivePattern(), is( "}" ) );
+
+ input = "{X";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "{X" ) );
+ assertThat( output.getEffectivePattern(), is( "{X" ) );
+
+ input = "X}";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "X}" ) );
+ assertThat( output.getEffectivePattern(), is( "X}" ) );
+
+ input = "X";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "X" ) );
+ assertThat( output.getEffectivePattern(), is( "X" ) );
+
+ input = "$";
+ output = Parser.parseTemplateToken( builder, input, "~" );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "$" ) );
+ assertThat( output.getEffectivePattern(), is( "$" ) );
+
+ // Empty token with the glob default pattern stays empty.
+ input = "";
+ output = Parser.parseTemplateToken( builder, input, Segment.GLOB_PATTERN );
+ assertThat( output.getParameterName(), is( "" ) );
+ assertThat( output.getOriginalPattern(), is( "" ) );
+ assertThat( output.getEffectivePattern(), is( "" ) );
+ }
+
+ // Regression test for KNOX-599: a query parameter with an explicit empty
+ // value ("createflag=") must yield an empty-string pattern, while the same
+ // parameter with no "=" at all ("createflag") must yield a null pattern.
+ @Test
+ public void testBugKnox599() throws Exception {
+ Template template;
+ Template input;
+ Matcher<String> matcher;
+
+ matcher = new Matcher<String>();
+ template = Parser.parseTemplate( "*://*:*/**/webhdfs/v1/{path=**}?{**}" );
+ matcher.add( template, "test-value" );
+
+ input = Parser.parseTemplate( "http://kminder-os-u14-23-knoxha-150922-1352-2.novalocal:1022/gateway/sandbox/webhdfs/v1/user/hrt_qa/knox-ha/knox_webhdfs_client_dir/test_file?op=CREATE&delegation=XXX&namenoderpcaddress=nameservice&createflag=&createparent=true&overwrite=true" );
+
+ assertThat( input.getQuery().get( "createflag" ).getFirstValue().getPattern(), is( "" ) );
+
+ input = Parser.parseTemplate( "http://kminder-os-u14-23-knoxha-150922-1352-2.novalocal:1022/gateway/sandbox/webhdfs/v1/user/hrt_qa/knox-ha/knox_webhdfs_client_dir/test_file?op=CREATE&delegation=XXX&namenoderpcaddress=nameservice&createflag&createparent=true&overwrite=true" );
+
+ assertThat( input.getQuery().get( "createflag" ).getFirstValue().getPattern(), nullValue() );
+ }
+
+ // Regression test for KNOX-394: parseLiteral must preserve reserved template
+ // characters ("{}", "{app.path}", "${app.path}") verbatim in toString().
+ @Test
+ public void testParserLiteralsWithReservedCharactersBugKnox394() throws Exception {
+ Template template;
+ String image;
+
+ template = Parser.parseLiteral( "{}" );
+ image = template.toString();
+ assertThat( image, is( "{}" ) );
+
+ template = Parser.parseLiteral( "{app.path}/child/path" );
+ image = template.toString();
+ assertThat( image, is( "{app.path}/child/path" ) );
+
+ template = Parser.parseLiteral( "${app.path}/child/path" );
+ image = template.toString();
+ assertThat( image, is( "${app.path}/child/path" ) );
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/pom.xml
----------------------------------------------------------------------
[08/23] knox git commit: KNOX-1075 and KNOX-1070 - Enforce minimum
maven and Java version (Rick Kellogg via Sandeep More)
Posted by mo...@apache.org.
KNOX-1075 and KNOX-1070 - Enforce minimum maven and Java version (Rick Kellogg via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/485769b9
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/485769b9
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/485769b9
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 485769b95b44f47699c48c47945a75c2b1d2ff97
Parents: ff3af36
Author: Sandeep More <mo...@apache.org>
Authored: Wed Oct 4 15:05:50 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Oct 4 15:05:50 2017 -0400
----------------------------------------------------------------------
pom.xml | 24 ++++++++++++++++++++++++
1 file changed, 24 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/485769b9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 2708f6b..fb00d71 100644
--- a/pom.xml
+++ b/pom.xml
@@ -336,6 +336,30 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <version>3.0.0-M1</version>
+ <executions>
+ <execution>
+ <id>enforce-maven</id>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <configuration>
+ <rules>
+ <requireMavenVersion>
+ <version>[3.0.2,)</version>
+ </requireMavenVersion>
+ <!--<requireJavaVersion>
+ <version>[1.8,)</version>
+ </requireJavaVersion> -->
+ </rules>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+
</plugins>
</build>
[05/23] knox git commit: KNOX-1055 - Spark page characters &amp;
should be treated as a delimiter (Mars via Sandeep More)
Posted by mo...@apache.org.
KNOX-1055 - Spark page characters &amp; should be treated as a delimiter (Mars via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/90f1df7f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/90f1df7f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/90f1df7f
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 90f1df7f5b4ed33a9462a46f2fc5afff032890a2
Parents: c5aedf4
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 2 11:37:24 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 2 11:37:24 2017 -0400
----------------------------------------------------------------------
.../hadoop/gateway/util/urltemplate/Parser.java | 10 +++++++---
.../gateway/util/urltemplate/ParserTest.java | 17 +++++++++++++++++
2 files changed, 24 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/90f1df7f/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java b/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
index e59cd32..a752b22 100644
--- a/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
+++ b/gateway-util-urltemplate/src/main/java/org/apache/hadoop/gateway/util/urltemplate/Parser.java
@@ -238,10 +238,14 @@ public class Parser {
private static void consumeQueryToken( final Builder builder, String token ) {
if( token != null ) {
- StringTokenizer tokenizer = new StringTokenizer( token, "?&" );
- while( tokenizer.hasMoreTokens() ) {
- consumeQuerySegment( builder, tokenizer.nextToken() );
+ //add "&amp;" as a delimiter
+ String[] tokens = token.split("(&|\\?|&amp;)");
+ if (tokens != null){
+ for (String nextToken : tokens){
+ consumeQuerySegment(builder,nextToken);
+ }
}
+
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/90f1df7f/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
----------------------------------------------------------------------
diff --git a/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java b/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
index f6d1ab2..4305e11 100644
--- a/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
+++ b/gateway-util-urltemplate/src/test/java/org/apache/hadoop/gateway/util/urltemplate/ParserTest.java
@@ -343,6 +343,23 @@ public class ParserTest {
assertThat( template.toString(), is( text ) );
}
+ /**
+ * KNOX-1055
+ * In some cases & could be encoded as &amp;
+ */
+ @Test
+ public void testEncodedChar() throws URISyntaxException {
+ Parser parser = new Parser();
+ String text;
+ Template template;
+
+ text = "stage?id=007&amp;attempt=0";
+ template = parser.parseTemplate( text );
+ assertBasics( template, false, false, true, 1, 2 );
+ assertQuery( template, "id", "", "007" );
+ assertQuery( template, "attempt", "", "0" );
+ }
+
@Test
public void testParameterizedPathTemplatesWithWildcardAndRegex() throws URISyntaxException {
String text;
[02/23] knox git commit: KNOX-1072 - Add Client Cert Required
Capability to KnoxToken
Posted by mo...@apache.org.
KNOX-1072 - Add Client Cert Required Capability to KnoxToken
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7b4755d5
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7b4755d5
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7b4755d5
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 7b4755d57c8998d1aed62c100124b8a94a3427db
Parents: 145ed5d
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Sep 28 19:27:38 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Sep 28 19:27:54 2017 -0400
----------------------------------------------------------------------
.../service/knoxtoken/TokenResource.java | 35 +++++
.../knoxtoken/TokenServiceResourceTest.java | 144 +++++++++++++++++++
2 files changed, 179 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/7b4755d5/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index 43dd526..9d8bae3 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.gateway.service.knoxtoken;
import java.io.IOException;
import java.security.Principal;
+import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
@@ -54,12 +55,16 @@ public class TokenResource {
private static final String TOKEN_AUDIENCES_PARAM = "knox.token.audiences";
private static final String TOKEN_TARGET_URL = "knox.token.target.url";
private static final String TOKEN_CLIENT_DATA = "knox.token.client.data";
+ private static final String TOKEN_CLIENT_CERT_REQUIRED = "knox.token.client.cert.required";
+ private static final String TOKEN_ALLOWED_PRINCIPALS = "knox.token.allowed.principals";
static final String RESOURCE_PATH = "knoxtoken/api/v1/token";
private static TokenServiceMessages log = MessagesFactory.get( TokenServiceMessages.class );
private long tokenTTL = 30000l;
private List<String> targetAudiences = new ArrayList<>();
private String tokenTargetUrl = null;
private Map<String,Object> tokenClientDataMap = null;
+ private ArrayList<String> allowedDNs = new ArrayList<>();
+ private boolean clientCertRequired = false;
@Context
HttpServletRequest request;
@@ -81,6 +86,17 @@ public class TokenResource {
}
}
+ String clientCert = context.getInitParameter(TOKEN_CLIENT_CERT_REQUIRED);
+ clientCertRequired = "true".equals(clientCert);
+
+ String principals = context.getInitParameter(TOKEN_ALLOWED_PRINCIPALS);
+ if (principals != null) {
+ String[] dns = principals.split(";");
+ for (int i = 0; i < dns.length; i++) {
+ allowedDNs.add(dns[i]);
+ }
+ }
+
String ttl = context.getInitParameter(TOKEN_TTL_PARAM);
if (ttl != null) {
try {
@@ -113,7 +129,26 @@ public class TokenResource {
return getAuthenticationToken();
}
+ private X509Certificate extractCertificate(HttpServletRequest req) {
+ X509Certificate[] certs = (X509Certificate[]) req.getAttribute("javax.servlet.request.X509Certificate");
+ if (null != certs && certs.length > 0) {
+ return certs[0];
+ }
+ return null;
+ }
+
private Response getAuthenticationToken() {
+ if (clientCertRequired) {
+ X509Certificate cert = extractCertificate(request);
+ if (cert != null) {
+ if (!allowedDNs.contains(cert.getSubjectDN().getName())) {
+ return Response.status(403).entity("{ \"Unable to get token - untrusted client cert.\" }").build();
+ }
+ }
+ else {
+ return Response.status(403).entity("{ \"Unable to get token - client cert required.\" }").build();
+ }
+ }
GatewayServices services = (GatewayServices) request.getServletContext()
.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
http://git-wip-us.apache.org/repos/asf/knox/blob/7b4755d5/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index bddd13d..b4e51e6 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.gateway.service.knoxtoken;
+import org.apache.hadoop.gateway.security.PrimaryPrincipal;
import org.apache.hadoop.gateway.service.knoxtoken.TokenResource;
import org.apache.hadoop.gateway.services.GatewayServices;
import org.apache.hadoop.gateway.services.security.token.JWTokenAuthority;
@@ -49,6 +50,7 @@ import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
+import java.security.cert.X509Certificate;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.util.ArrayList;
@@ -203,6 +205,148 @@ public class TokenServiceResourceTest {
assertTrue(audiences.contains("recipient2"));
}
+ @Test
+ public void testValidClientCert() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
+ EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
+ ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
+ certArrayList.add(trustedCertMock);
+ X509Certificate[] certs = {};
+ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response, trustedCertMock);
+
+ TokenResource tr = new TokenResource();
+ tr.request = request;
+ tr.response = response;
+ tr.context = context;
+ tr.init();
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(200, retResponse.getStatus());
+
+ // Parse the response
+ String retString = writer.toString();
+ String accessToken = getTagValue(retString, "access_token");
+ assertNotNull(accessToken);
+ String expiry = getTagValue(retString, "expires_in");
+ assertNotNull(expiry);
+
+ // Verify the token
+ JWTToken parsedToken = new JWTToken(accessToken);
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+ }
+
+ @Test
+ public void testValidClientCertWrongUser() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
+ EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
+ ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
+ certArrayList.add(trustedCertMock);
+ X509Certificate[] certs = {};
+ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response, trustedCertMock);
+
+ TokenResource tr = new TokenResource();
+ tr.request = request;
+ tr.response = response;
+ tr.context = context;
+ tr.init();
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(403, retResponse.getStatus());
+ }
+
+ @Test
+ public void testMissingClientCert() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
+ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(null).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response);
+
+ TokenResource tr = new TokenResource();
+ tr.request = request;
+ tr.response = response;
+ tr.context = context;
+ tr.init();
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(403, retResponse.getStatus());
+ }
+
private String getTagValue(String token, String tagName) {
String searchString = tagName + "\":";
String value = token.substring(token.indexOf(searchString) + searchString.length());
[03/23] knox git commit: KNOX-1074 - Knox Proxy - Workflow Manager
view fails to load when using ambari through KNOX (Venkatasairam Lanka via
lmccay)
Posted by mo...@apache.org.
KNOX-1074 - Knox Proxy - Workflow Manager view fails to load when using ambari through KNOX (Venkatasairam Lanka via lmccay)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/c5aedf40
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/c5aedf40
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/c5aedf40
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: c5aedf40f21ffc9f884330e576ff8f680fd5f368
Parents: 7b4755d
Author: Larry McCay <lm...@hortonworks.com>
Authored: Sat Sep 30 20:51:05 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Sat Sep 30 20:51:05 2017 -0400
----------------------------------------------------------------------
.../src/main/resources/services/ambariui/2.2.0/service.xml | 5 +++++
1 file changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/c5aedf40/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
index ab4ab2b..0ed0a31 100644
--- a/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
+++ b/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
@@ -80,6 +80,11 @@
<rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/noroot" to="response.body"/>
</route>
+ <!-- Wfmanager view -->
+ <route path="/ambari/views/WORKFLOW_MANAGER/*/*/">
+ <rewrite apply="AMBARIUI/ambari/outbound/apiendpoint/html" to="response.body"/>
+ </route>
+
<!-- SmartSense view -->
<route path="/ambari/views/SMARTSENSE/**/assets/hstapp-*.js">
<rewrite apply="AMBARIUI/ambari/outbound/apiendpoint" to="response.body"/>
[22/23] knox git commit: KNOX-998 - package name refactoring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
deleted file mode 100644
index dd35dbb..0000000
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
+++ /dev/null
@@ -1,876 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.commons.io.FileUtils;
-import org.easymock.EasyMock;
-import org.junit.Test;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import static junit.framework.TestCase.assertTrue;
-import static junit.framework.TestCase.fail;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-
-public class AmbariDynamicServiceURLCreatorTest {
-
- @Test
- public void testHiveURLFromInternalMapping() throws Exception {
- testHiveURL(null);
- }
-
- @Test
- public void testHiveURLFromExternalMapping() throws Exception {
- testHiveURL(TEST_MAPPING_CONFIG);
- }
-
- private void testHiveURL(Object mappingConfiguration) throws Exception {
-
- final String SERVICE_NAME = "HIVE";
- final String[] HOSTNAMES = {"host3", "host2", "host4"};
- final String HTTP_PATH = "cliservice";
- final String HTTP_PORT = "10001";
- final String BINARY_PORT = "10000";
-
- String expectedScheme = "http";
-
- final List<String> hiveServerHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent hiveServer = EasyMock.createNiceMock(AmbariComponent.class);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(hiveServer).anyTimes();
- EasyMock.replay(cluster);
-
- // Configure HTTP Transport
- EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
- EasyMock.replay(hiveServer);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- List<String> urls = builder.create(SERVICE_NAME);
- assertEquals(HOSTNAMES.length, urls.size());
- validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
-
- // Configure BINARY Transport
- EasyMock.reset(hiveServer);
- EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn("").anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.port")).andReturn(BINARY_PORT).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("binary").anyTimes();
- EasyMock.replay(hiveServer);
-
- // Run the test
- urls = builder.create(SERVICE_NAME);
- assertEquals(HOSTNAMES.length, urls.size());
- validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, "");
-
- // Configure HTTPS Transport
- EasyMock.reset(hiveServer);
- EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("true").anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
- EasyMock.replay(hiveServer);
-
- // Run the test
- expectedScheme = "https";
- urls = builder.create(SERVICE_NAME);
- assertEquals(HOSTNAMES.length, urls.size());
- validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
- }
-
- @Test
- public void testResourceManagerURLFromInternalMapping() throws Exception {
- testResourceManagerURL(null);
- }
-
- @Test
- public void testResourceManagerURLFromExternalMapping() throws Exception {
- testResourceManagerURL(TEST_MAPPING_CONFIG);
- }
-
- private void testResourceManagerURL(Object mappingConfiguration) throws Exception {
-
- final String HTTP_ADDRESS = "host2:1111";
- final String HTTPS_ADDRESS = "host2:22222";
-
- // HTTP
- AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
- setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTP");
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- String url = builder.create("RESOURCEMANAGER").get(0);
- assertEquals("http://" + HTTP_ADDRESS + "/ws", url);
-
- // HTTPS
- EasyMock.reset(resman);
- setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTPS_ONLY");
-
- // Run the test
- url = builder.create("RESOURCEMANAGER").get(0);
- assertEquals("https://" + HTTPS_ADDRESS + "/ws", url);
- }
-
- private void setResourceManagerComponentExpectations(final AmbariComponent resmanMock,
- final String httpAddress,
- final String httpsAddress,
- final String httpPolicy) {
- EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.address")).andReturn(httpAddress).anyTimes();
- EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.https.address")).andReturn(httpsAddress).anyTimes();
- EasyMock.expect(resmanMock.getConfigProperty("yarn.http.policy")).andReturn(httpPolicy).anyTimes();
- EasyMock.replay(resmanMock);
- }
-
- @Test
- public void testJobTrackerURLFromInternalMapping() throws Exception {
- testJobTrackerURL(null);
- }
-
- @Test
- public void testJobTrackerURLFromExternalMapping() throws Exception {
- testJobTrackerURL(TEST_MAPPING_CONFIG);
- }
-
- private void testJobTrackerURL(Object mappingConfiguration) throws Exception {
- final String ADDRESS = "host2:5678";
-
- AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(resman.getConfigProperty("yarn.resourcemanager.address")).andReturn(ADDRESS).anyTimes();
- EasyMock.replay(resman);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- String url = builder.create("JOBTRACKER").get(0);
- assertEquals("rpc://" + ADDRESS, url);
- }
-
- @Test
- public void testNameNodeURLFromInternalMapping() throws Exception {
- testNameNodeURL(null);
- }
-
- @Test
- public void testNameNodeURLFromExternalMapping() throws Exception {
- testNameNodeURL(TEST_MAPPING_CONFIG);
- }
-
- private void testNameNodeURL(Object mappingConfiguration) throws Exception {
- final String ADDRESS = "host1:1234";
-
- AmbariComponent namenode = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(namenode.getConfigProperty("dfs.namenode.rpc-address")).andReturn(ADDRESS).anyTimes();
- EasyMock.replay(namenode);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- String url = builder.create("NAMENODE").get(0);
- assertEquals("hdfs://" + ADDRESS, url);
- }
-
- @Test
- public void testWebHCatURLFromInternalMapping() throws Exception {
- testWebHCatURL(null);
- }
-
- @Test
- public void testWebHCatURLFromExternalMapping() throws Exception {
- testWebHCatURL(TEST_MAPPING_CONFIG);
- }
-
- private void testWebHCatURL(Object mappingConfiguration) throws Exception {
-
- final String HOSTNAME = "host3";
- final String PORT = "1919";
-
- AmbariComponent webhcatServer = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(webhcatServer.getConfigProperty("templeton.port")).andReturn(PORT).anyTimes();
- List<String> webHcatServerHosts = Collections.singletonList(HOSTNAME);
- EasyMock.expect(webhcatServer.getHostNames()).andReturn(webHcatServerHosts).anyTimes();
- EasyMock.replay(webhcatServer);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("WEBHCAT_SERVER")).andReturn(webhcatServer).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- String url = builder.create("WEBHCAT").get(0);
- assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
- }
-
- @Test
- public void testOozieURLFromInternalMapping() throws Exception {
- testOozieURL(null);
- }
-
- @Test
- public void testOozieURLFromExternalMapping() throws Exception {
- testOozieURL(TEST_MAPPING_CONFIG);
- }
-
- private void testOozieURL(Object mappingConfiguration) throws Exception {
- final String URL = "http://host3:2222";
-
- AmbariComponent oozieServer = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(oozieServer.getConfigProperty("oozie.base.url")).andReturn(URL).anyTimes();
- EasyMock.replay(oozieServer);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("OOZIE_SERVER")).andReturn(oozieServer).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- String url = builder.create("OOZIE").get(0);
- assertEquals(URL, url);
- }
-
- @Test
- public void testWebHBaseURLFromInternalMapping() throws Exception {
- testWebHBaseURL(null);
- }
-
- @Test
- public void testWebHBaseURLFromExternalMapping() throws Exception {
- testWebHBaseURL(TEST_MAPPING_CONFIG);
- }
-
- private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
- final String[] HOSTNAMES = {"host2", "host4"};
-
- AmbariComponent hbaseMaster = EasyMock.createNiceMock(AmbariComponent.class);
- List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
- EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
- EasyMock.replay(hbaseMaster);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("HBASE_MASTER")).andReturn(hbaseMaster).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
- List<String> urls = builder.create("WEBHBASE");
- validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
- }
-
- @Test
- public void testWebHdfsURLFromInternalMapping() throws Exception {
- testWebHdfsURL(null);
- }
-
- @Test
- public void testWebHdfsURLFromExternalMapping() throws Exception {
- testWebHdfsURL(TEST_MAPPING_CONFIG);
- }
-
- @Test
- public void testWebHdfsURLFromSystemPropertyOverride() throws Exception {
- // Write the test mapping configuration to a temp file
- File mappingFile = File.createTempFile("mapping-config", "xml");
- FileUtils.write(mappingFile, OVERRIDE_MAPPING_FILE_CONTENTS, "utf-8");
-
- // Set the system property to point to the temp file
- System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
- mappingFile.getAbsolutePath());
- try {
- final String ADDRESS = "host3:1357";
- // The URL creator should apply the file contents, and create the URL accordingly
- String url = getTestWebHdfsURL(ADDRESS, null);
-
- // Verify the URL matches the pattern from the file
- assertEquals("http://" + ADDRESS + "/webhdfs/OVERRIDE", url);
- } finally {
- // Reset the system property, and delete the temp file
- System.clearProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY);
- mappingFile.delete();
- }
- }
-
- private void testWebHdfsURL(Object mappingConfiguration) throws Exception {
- final String ADDRESS = "host3:1357";
- assertEquals("http://" + ADDRESS + "/webhdfs", getTestWebHdfsURL(ADDRESS, mappingConfiguration));
- }
-
-
- private String getTestWebHdfsURL(String address, Object mappingConfiguration) throws Exception {
- AmbariCluster.ServiceConfiguration hdfsSC = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
- Map<String, String> hdfsProps = new HashMap<>();
- hdfsProps.put("dfs.namenode.http-address", address);
- EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
- EasyMock.replay(hdfsSC);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site")).andReturn(hdfsSC).anyTimes();
- EasyMock.replay(cluster);
-
- // Create the URL
- AmbariDynamicServiceURLCreator creator = newURLCreator(cluster, mappingConfiguration);
- return creator.create("WEBHDFS").get(0);
- }
-
-
- @Test
- public void testAtlasApiURL() throws Exception {
- final String ATLAS_REST_ADDRESS = "http://host2:21000";
-
- AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(atlasServer.getConfigProperty("atlas.rest.address")).andReturn(ATLAS_REST_ADDRESS).anyTimes();
- EasyMock.replay(atlasServer);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("ATLAS-API");
- assertEquals(1, urls.size());
- assertEquals(ATLAS_REST_ADDRESS, urls.get(0));
- }
-
-
- @Test
- public void testAtlasURL() throws Exception {
- final String HTTP_PORT = "8787";
- final String HTTPS_PORT = "8989";
-
- final String[] HOSTNAMES = {"host1", "host4"};
- final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("false").anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(atlasServer);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("ATLAS");
- validateServiceURLs(urls, HOSTNAMES, "http", HTTP_PORT, null);
-
- EasyMock.reset(atlasServer);
- EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("true").anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(atlasServer);
-
- // Run the test
- urls = builder.create("ATLAS");
- validateServiceURLs(urls, HOSTNAMES, "https", HTTPS_PORT, null);
- }
-
-
- @Test
- public void testZeppelinURL() throws Exception {
- final String HTTP_PORT = "8787";
- final String HTTPS_PORT = "8989";
-
- final String[] HOSTNAMES = {"host1", "host4"};
- final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
- EasyMock.replay(cluster);
-
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "http", HTTP_PORT, null);
-
- EasyMock.reset(zeppelinMaster);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "https", HTTPS_PORT, null);
- }
-
-
- @Test
- public void testZeppelinUiURL() throws Exception {
- final String HTTP_PORT = "8787";
- final String HTTPS_PORT = "8989";
-
- final String[] HOSTNAMES = {"host1", "host4"};
- final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
- EasyMock.replay(cluster);
-
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "http", HTTP_PORT, null);
-
- EasyMock.reset(zeppelinMaster);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "https", HTTPS_PORT, null);
- }
-
-
- @Test
- public void testZeppelinWsURL() throws Exception {
- final String HTTP_PORT = "8787";
- final String HTTPS_PORT = "8989";
-
- final String[] HOSTNAMES = {"host1", "host4"};
- final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
- EasyMock.replay(cluster);
-
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "ws", HTTP_PORT, null);
-
- EasyMock.reset(zeppelinMaster);
- EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
- EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
- EasyMock.replay(zeppelinMaster);
-
- // Run the test
- validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "wss", HTTPS_PORT, null);
- }
-
-
- @Test
- public void testDruidCoordinatorURL() throws Exception {
- final String PORT = "8787";
-
- final String[] HOSTNAMES = {"host3", "host2"};
- final List<String> druidCoordinatorHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent druidCoordinator = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(druidCoordinator.getHostNames()).andReturn(druidCoordinatorHosts).anyTimes();
- EasyMock.expect(druidCoordinator.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
- EasyMock.replay(druidCoordinator);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_COORDINATOR")).andReturn(druidCoordinator).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("DRUID-COORDINATOR");
- validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
- }
-
-
- @Test
- public void testDruidBrokerURL() throws Exception {
- final String PORT = "8181";
-
- final String[] HOSTNAMES = {"host4", "host3"};
- final List<String> druidHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent druidBroker = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(druidBroker.getHostNames()).andReturn(druidHosts).anyTimes();
- EasyMock.expect(druidBroker.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
- EasyMock.replay(druidBroker);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(druidBroker).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("DRUID-BROKER");
- validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
- }
-
-
- @Test
- public void testDruidRouterURL() throws Exception {
- final String PORT = "8282";
-
- final String[] HOSTNAMES = {"host5", "host7"};
- final List<String> druidHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent druidRouter = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(druidRouter.getHostNames()).andReturn(druidHosts).anyTimes();
- EasyMock.expect(druidRouter.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
- EasyMock.replay(druidRouter);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_ROUTER")).andReturn(druidRouter).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("DRUID-ROUTER");
- validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
- }
-
-
- @Test
- public void testDruidOverlordURL() throws Exception {
- final String PORT = "8383";
-
- final String[] HOSTNAMES = {"host4", "host1"};
- final List<String> druidHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent druidOverlord = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(druidOverlord.getHostNames()).andReturn(druidHosts).anyTimes();
- EasyMock.expect(druidOverlord.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
- EasyMock.replay(druidOverlord);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_OVERLORD")).andReturn(druidOverlord).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("DRUID-OVERLORD");
- validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
- }
-
-
- @Test
- public void testDruidSupersetURL() throws Exception {
- final String PORT = "8484";
-
- final String[] HOSTNAMES = {"host4", "host1"};
- final List<String> druidHosts = Arrays.asList(HOSTNAMES);
-
- AmbariComponent druidSuperset = EasyMock.createNiceMock(AmbariComponent.class);
- EasyMock.expect(druidSuperset.getHostNames()).andReturn(druidHosts).anyTimes();
- EasyMock.expect(druidSuperset.getConfigProperty("SUPERSET_WEBSERVER_PORT")).andReturn(PORT).anyTimes();
- EasyMock.replay(druidSuperset);
-
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_SUPERSET")).andReturn(druidSuperset).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("SUPERSET");
- validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
- }
-
-
- @Test
- public void testMissingServiceComponentURL() throws Exception {
- AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
- EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();
- EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(null).anyTimes();
- EasyMock.replay(cluster);
-
- // Run the test
- AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
- List<String> urls = builder.create("DRUID-BROKER");
- assertNotNull(urls);
- assertEquals(1, urls.size());
- assertEquals("http://{HOST}:{PORT}", urls.get(0));
-
- urls = builder.create("HIVE");
- assertNotNull(urls);
- assertEquals(1, urls.size());
- assertEquals("http://{HOST}:{PORT}/{PATH}", urls.get(0));
- }
-
-
- /**
- * Convenience method for creating AmbariDynamicServiceURLCreator instances from different mapping configuration
- * input sources.
- *
- * @param cluster The Ambari ServiceDiscovery Cluster model
- * @param mappingConfig The mapping configuration, or null if the internal config should be used.
- *
- * @return An AmbariDynamicServiceURLCreator instance, capable of creating service URLs based on the specified
- * cluster's configuration details.
- */
- private static AmbariDynamicServiceURLCreator newURLCreator(AmbariCluster cluster, Object mappingConfig) throws Exception {
- AmbariDynamicServiceURLCreator result = null;
-
- if (mappingConfig == null) {
- result = new AmbariDynamicServiceURLCreator(cluster);
- } else {
- if (mappingConfig instanceof String) {
- result = new AmbariDynamicServiceURLCreator(cluster, (String) mappingConfig);
- } else if (mappingConfig instanceof File) {
- result = new AmbariDynamicServiceURLCreator(cluster, (File) mappingConfig);
- }
- }
-
- return result;
- }
-
-
- /**
- * Validate the specifed HIVE URLs.
- *
- * @param urlsToValidate The URLs to validate
- * @param hostNames The host names expected in the test URLs
- * @param scheme The expected scheme for the URLs
- * @param port The expected port for the URLs
- * @param path The expected path for the URLs
- */
- private static void validateServiceURLs(List<String> urlsToValidate,
- String[] hostNames,
- String scheme,
- String port,
- String path) throws MalformedURLException {
-
- List<String> hostNamesToTest = new LinkedList<>(Arrays.asList(hostNames));
- for (String url : urlsToValidate) {
- URI test = null;
- try {
- // Make sure it's a valid URL
- test = new URI(url);
- } catch (URISyntaxException e) {
- fail(e.getMessage());
- }
-
- // Validate the scheme
- assertEquals(scheme, test.getScheme());
-
- // Validate the port
- assertEquals(port, String.valueOf(test.getPort()));
-
- // If the expected path is not specified, don't validate it
- if (path != null) {
- assertEquals("/" + path, test.getPath());
- }
-
- // Validate the host name
- assertTrue(hostNamesToTest.contains(test.getHost()));
- hostNamesToTest.remove(test.getHost());
- }
- assertTrue(hostNamesToTest.isEmpty());
- }
-
-
- private static final String TEST_MAPPING_CONFIG =
- "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
- "<service-discovery-url-mappings>\n" +
- " <service name=\"NAMENODE\">\n" +
- " <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"DFS_NAMENODE_RPC_ADDRESS\">\n" +
- " <component>NAMENODE</component>\n" +
- " <config-property>dfs.namenode.rpc-address</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "\n" +
- " <service name=\"JOBTRACKER\">\n" +
- " <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"YARN_RM_ADDRESS\">\n" +
- " <component>RESOURCEMANAGER</component>\n" +
- " <config-property>yarn.resourcemanager.address</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "\n" +
- " <service name=\"WEBHDFS\">\n" +
- " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"WEBHDFS_ADDRESS\">\n" +
- " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
- " <config-property>dfs.namenode.http-address</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "\n" +
- " <service name=\"WEBHCAT\">\n" +
- " <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"HOST\">\n" +
- " <component>WEBHCAT_SERVER</component>\n" +
- " <hostname/>\n" +
- " </property>\n" +
- " <property name=\"PORT\">\n" +
- " <component>WEBHCAT_SERVER</component>\n" +
- " <config-property>templeton.port</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "\n" +
- " <service name=\"OOZIE\">\n" +
- " <url-pattern>{OOZIE_ADDRESS}</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"OOZIE_ADDRESS\">\n" +
- " <component>OOZIE_SERVER</component>\n" +
- " <config-property>oozie.base.url</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "\n" +
- " <service name=\"WEBHBASE\">\n" +
- " <url-pattern>http://{HOST}:60080</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"HOST\">\n" +
- " <component>HBASE_MASTER</component>\n" +
- " <hostname/>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- " <service name=\"RESOURCEMANAGER\">\n" +
- " <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"WEBAPP_HTTP_ADDRESS\">\n" +
- " <component>RESOURCEMANAGER</component>\n" +
- " <config-property>yarn.resourcemanager.webapp.address</config-property>\n" +
- " </property>\n" +
- " <property name=\"WEBAPP_HTTPS_ADDRESS\">\n" +
- " <component>RESOURCEMANAGER</component>\n" +
- " <config-property>yarn.resourcemanager.webapp.https.address</config-property>\n" +
- " </property>\n" +
- " <property name=\"HTTP_POLICY\">\n" +
- " <component>RESOURCEMANAGER</component>\n" +
- " <config-property>yarn.http.policy</config-property>\n" +
- " </property>\n" +
- " <property name=\"SCHEME\">\n" +
- " <config-property>\n" +
- " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
- " <then>https</then>\n" +
- " <else>http</else>\n" +
- " </if>\n" +
- " </config-property>\n" +
- " </property>\n" +
- " <property name=\"WEBAPP_ADDRESS\">\n" +
- " <component>RESOURCEMANAGER</component>\n" +
- " <config-property>\n" +
- " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
- " <then>WEBAPP_HTTPS_ADDRESS</then>\n" +
- " <else>WEBAPP_HTTP_ADDRESS</else>\n" +
- " </if>\n" +
- " </config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- " <service name=\"HIVE\">\n" +
- " <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"HOST\">\n" +
- " <component>HIVE_SERVER</component>\n" +
- " <hostname/>\n" +
- " </property>\n" +
- " <property name=\"USE_SSL\">\n" +
- " <component>HIVE_SERVER</component>\n" +
- " <config-property>hive.server2.use.SSL</config-property>\n" +
- " </property>\n" +
- " <property name=\"PATH\">\n" +
- " <component>HIVE_SERVER</component>\n" +
- " <config-property>hive.server2.thrift.http.path</config-property>\n" +
- " </property>\n" +
- " <property name=\"PORT\">\n" +
- " <component>HIVE_SERVER</component>\n" +
- " <config-property>hive.server2.thrift.http.port</config-property>\n" +
- " </property>\n" +
- " <property name=\"SCHEME\">\n" +
- " <config-property>\n" +
- " <if property=\"USE_SSL\" value=\"true\">\n" +
- " <then>https</then>\n" +
- " <else>http</else>\n" +
- " </if>\n" +
- " </config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "</service-discovery-url-mappings>\n";
-
-
- private static final String OVERRIDE_MAPPING_FILE_CONTENTS =
- "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
- "<service-discovery-url-mappings>\n" +
- " <service name=\"WEBHDFS\">\n" +
- " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs/OVERRIDE</url-pattern>\n" +
- " <properties>\n" +
- " <property name=\"WEBHDFS_ADDRESS\">\n" +
- " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
- " <config-property>dfs.namenode.http-address</config-property>\n" +
- " </property>\n" +
- " </properties>\n" +
- " </service>\n" +
- "</service-discovery-url-mappings>\n";
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
new file mode 100644
index 0000000..f015dd5
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
@@ -0,0 +1,876 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static junit.framework.TestCase.assertTrue;
+import static junit.framework.TestCase.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+
+public class AmbariDynamicServiceURLCreatorTest {
+
+ @Test
+ public void testHiveURLFromInternalMapping() throws Exception {
+ testHiveURL(null);
+ }
+
+ @Test
+ public void testHiveURLFromExternalMapping() throws Exception {
+ testHiveURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testHiveURL(Object mappingConfiguration) throws Exception {
+
+ final String SERVICE_NAME = "HIVE";
+ final String[] HOSTNAMES = {"host3", "host2", "host4"};
+ final String HTTP_PATH = "cliservice";
+ final String HTTP_PORT = "10001";
+ final String BINARY_PORT = "10000";
+
+ String expectedScheme = "http";
+
+ final List<String> hiveServerHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent hiveServer = EasyMock.createNiceMock(AmbariComponent.class);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(hiveServer).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Configure HTTP Transport
+ EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+ EasyMock.replay(hiveServer);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ List<String> urls = builder.create(SERVICE_NAME);
+ assertEquals(HOSTNAMES.length, urls.size());
+ validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+
+ // Configure BINARY Transport
+ EasyMock.reset(hiveServer);
+ EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn("").anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.port")).andReturn(BINARY_PORT).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("binary").anyTimes();
+ EasyMock.replay(hiveServer);
+
+ // Run the test
+ urls = builder.create(SERVICE_NAME);
+ assertEquals(HOSTNAMES.length, urls.size());
+ validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, "");
+
+ // Configure HTTPS Transport
+ EasyMock.reset(hiveServer);
+ EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("true").anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
+ EasyMock.replay(hiveServer);
+
+ // Run the test
+ expectedScheme = "https";
+ urls = builder.create(SERVICE_NAME);
+ assertEquals(HOSTNAMES.length, urls.size());
+ validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
+ }
+
+ @Test
+ public void testResourceManagerURLFromInternalMapping() throws Exception {
+ testResourceManagerURL(null);
+ }
+
+ @Test
+ public void testResourceManagerURLFromExternalMapping() throws Exception {
+ testResourceManagerURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testResourceManagerURL(Object mappingConfiguration) throws Exception {
+
+ final String HTTP_ADDRESS = "host2:1111";
+ final String HTTPS_ADDRESS = "host2:22222";
+
+ // HTTP
+ AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+ setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTP");
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ String url = builder.create("RESOURCEMANAGER").get(0);
+ assertEquals("http://" + HTTP_ADDRESS + "/ws", url);
+
+ // HTTPS
+ EasyMock.reset(resman);
+ setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTPS_ONLY");
+
+ // Run the test
+ url = builder.create("RESOURCEMANAGER").get(0);
+ assertEquals("https://" + HTTPS_ADDRESS + "/ws", url);
+ }
+
+ private void setResourceManagerComponentExpectations(final AmbariComponent resmanMock,
+ final String httpAddress,
+ final String httpsAddress,
+ final String httpPolicy) {
+ EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.address")).andReturn(httpAddress).anyTimes();
+ EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.https.address")).andReturn(httpsAddress).anyTimes();
+ EasyMock.expect(resmanMock.getConfigProperty("yarn.http.policy")).andReturn(httpPolicy).anyTimes();
+ EasyMock.replay(resmanMock);
+ }
+
+ @Test
+ public void testJobTrackerURLFromInternalMapping() throws Exception {
+ testJobTrackerURL(null);
+ }
+
+ @Test
+ public void testJobTrackerURLFromExternalMapping() throws Exception {
+ testJobTrackerURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testJobTrackerURL(Object mappingConfiguration) throws Exception {
+ final String ADDRESS = "host2:5678";
+
+ AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(resman.getConfigProperty("yarn.resourcemanager.address")).andReturn(ADDRESS).anyTimes();
+ EasyMock.replay(resman);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ String url = builder.create("JOBTRACKER").get(0);
+ assertEquals("rpc://" + ADDRESS, url);
+ }
+
+ @Test
+ public void testNameNodeURLFromInternalMapping() throws Exception {
+ testNameNodeURL(null);
+ }
+
+ @Test
+ public void testNameNodeURLFromExternalMapping() throws Exception {
+ testNameNodeURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testNameNodeURL(Object mappingConfiguration) throws Exception {
+ final String ADDRESS = "host1:1234";
+
+ AmbariComponent namenode = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(namenode.getConfigProperty("dfs.namenode.rpc-address")).andReturn(ADDRESS).anyTimes();
+ EasyMock.replay(namenode);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ String url = builder.create("NAMENODE").get(0);
+ assertEquals("hdfs://" + ADDRESS, url);
+ }
+
+ @Test
+ public void testWebHCatURLFromInternalMapping() throws Exception {
+ testWebHCatURL(null);
+ }
+
+ @Test
+ public void testWebHCatURLFromExternalMapping() throws Exception {
+ testWebHCatURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testWebHCatURL(Object mappingConfiguration) throws Exception {
+
+ final String HOSTNAME = "host3";
+ final String PORT = "1919";
+
+ AmbariComponent webhcatServer = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(webhcatServer.getConfigProperty("templeton.port")).andReturn(PORT).anyTimes();
+ List<String> webHcatServerHosts = Collections.singletonList(HOSTNAME);
+ EasyMock.expect(webhcatServer.getHostNames()).andReturn(webHcatServerHosts).anyTimes();
+ EasyMock.replay(webhcatServer);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("WEBHCAT_SERVER")).andReturn(webhcatServer).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ String url = builder.create("WEBHCAT").get(0);
+ assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
+ }
+
+ @Test
+ public void testOozieURLFromInternalMapping() throws Exception {
+ testOozieURL(null);
+ }
+
+ @Test
+ public void testOozieURLFromExternalMapping() throws Exception {
+ testOozieURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testOozieURL(Object mappingConfiguration) throws Exception {
+ final String URL = "http://host3:2222";
+
+ AmbariComponent oozieServer = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(oozieServer.getConfigProperty("oozie.base.url")).andReturn(URL).anyTimes();
+ EasyMock.replay(oozieServer);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("OOZIE_SERVER")).andReturn(oozieServer).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ String url = builder.create("OOZIE").get(0);
+ assertEquals(URL, url);
+ }
+
+ @Test
+ public void testWebHBaseURLFromInternalMapping() throws Exception {
+ testWebHBaseURL(null);
+ }
+
+ @Test
+ public void testWebHBaseURLFromExternalMapping() throws Exception {
+ testWebHBaseURL(TEST_MAPPING_CONFIG);
+ }
+
+ private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
+ final String[] HOSTNAMES = {"host2", "host4"};
+
+ AmbariComponent hbaseMaster = EasyMock.createNiceMock(AmbariComponent.class);
+ List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
+ EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
+ EasyMock.replay(hbaseMaster);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("HBASE_MASTER")).andReturn(hbaseMaster).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
+ List<String> urls = builder.create("WEBHBASE");
+ validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
+ }
+
+ @Test
+ public void testWebHdfsURLFromInternalMapping() throws Exception {
+ testWebHdfsURL(null);
+ }
+
+ @Test
+ public void testWebHdfsURLFromExternalMapping() throws Exception {
+ testWebHdfsURL(TEST_MAPPING_CONFIG);
+ }
+
+ // Verifies that a mapping file referenced via the MAPPING_CONFIG_OVERRIDE_PROPERTY system
+ // property takes precedence over the internal mapping; cleans up the property and file after.
+ @Test
+ public void testWebHdfsURLFromSystemPropertyOverride() throws Exception {
+ // Write the test mapping configuration to a temp file
+ // NOTE(review): the suffix "xml" has no leading dot, so the temp file is named like
+ // "mapping-configNNNxml"; harmless here, but probably intended to be ".xml"
+ File mappingFile = File.createTempFile("mapping-config", "xml");
+ FileUtils.write(mappingFile, OVERRIDE_MAPPING_FILE_CONTENTS, "utf-8");
+
+ // Set the system property to point to the temp file
+ System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
+ mappingFile.getAbsolutePath());
+ try {
+ final String ADDRESS = "host3:1357";
+ // The URL creator should apply the file contents, and create the URL accordingly
+ String url = getTestWebHdfsURL(ADDRESS, null);
+
+ // Verify the URL matches the pattern from the file
+ assertEquals("http://" + ADDRESS + "/webhdfs/OVERRIDE", url);
+ } finally {
+ // Reset the system property, and delete the temp file
+ System.clearProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY);
+ mappingFile.delete();
+ }
+ }
+
+ // Verifies the WEBHDFS URL is built from dfs.namenode.http-address with the /webhdfs path.
+ private void testWebHdfsURL(Object mappingConfiguration) throws Exception {
+ final String ADDRESS = "host3:1357";
+ assertEquals("http://" + ADDRESS + "/webhdfs", getTestWebHdfsURL(ADDRESS, mappingConfiguration));
+ }
+
+
+ // Builds a WEBHDFS URL from a mocked cluster whose HDFS hdfs-site config carries the given
+ // dfs.namenode.http-address value; returns the first URL the creator produces.
+ private String getTestWebHdfsURL(String address, Object mappingConfiguration) throws Exception {
+ AmbariCluster.ServiceConfiguration hdfsSC = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+ Map<String, String> hdfsProps = new HashMap<>();
+ hdfsProps.put("dfs.namenode.http-address", address);
+ EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+ EasyMock.replay(hdfsSC);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site")).andReturn(hdfsSC).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Create the URL
+ AmbariDynamicServiceURLCreator creator = newURLCreator(cluster, mappingConfiguration);
+ return creator.create("WEBHDFS").get(0);
+ }
+
+
+ // Verifies the ATLAS-API URL is taken verbatim from the atlas.rest.address property.
+ @Test
+ public void testAtlasApiURL() throws Exception {
+ final String ATLAS_REST_ADDRESS = "http://host2:21000";
+
+ AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.rest.address")).andReturn(ATLAS_REST_ADDRESS).anyTimes();
+ EasyMock.replay(atlasServer);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("ATLAS-API");
+ assertEquals(1, urls.size());
+ assertEquals(ATLAS_REST_ADDRESS, urls.get(0));
+ }
+
+
+ // Verifies ATLAS URL scheme/port selection driven by atlas.enableTLS: http + http.port when
+ // TLS is disabled, https + https.port when enabled. The mock is reset and re-replayed to
+ // exercise both branches against the same builder.
+ @Test
+ public void testAtlasURL() throws Exception {
+ final String HTTP_PORT = "8787";
+ final String HTTPS_PORT = "8989";
+
+ // (variable name "atlastServerHosts" carries a typo; kept as-is)
+ final String[] HOSTNAMES = {"host1", "host4"};
+ final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("false").anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(atlasServer);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("ATLAS");
+ validateServiceURLs(urls, HOSTNAMES, "http", HTTP_PORT, null);
+
+ // Re-stub with TLS enabled and verify the https variant
+ EasyMock.reset(atlasServer);
+ EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("true").anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(atlasServer);
+
+ // Run the test
+ urls = builder.create("ATLAS");
+ validateServiceURLs(urls, HOSTNAMES, "https", HTTPS_PORT, null);
+ }
+
+
+ // Verifies ZEPPELIN URL scheme/port selection driven by zeppelin.ssl: http + server.port
+ // when SSL is off, https + server.ssl.port when on.
+ // (local list name "atlastServerHosts" is a copy-paste from the Atlas test; kept as-is)
+ @Test
+ public void testZeppelinURL() throws Exception {
+ final String HTTP_PORT = "8787";
+ final String HTTPS_PORT = "8989";
+
+ final String[] HOSTNAMES = {"host1", "host4"};
+ final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+ EasyMock.replay(cluster);
+
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "http", HTTP_PORT, null);
+
+ // Re-stub with SSL enabled and verify the https variant
+ EasyMock.reset(zeppelinMaster);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "https", HTTPS_PORT, null);
+ }
+
+
+ // Same as testZeppelinURL but for the ZEPPELINUI service: http/https chosen by zeppelin.ssl.
+ @Test
+ public void testZeppelinUiURL() throws Exception {
+ final String HTTP_PORT = "8787";
+ final String HTTPS_PORT = "8989";
+
+ final String[] HOSTNAMES = {"host1", "host4"};
+ final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+ EasyMock.replay(cluster);
+
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "http", HTTP_PORT, null);
+
+ // Re-stub with SSL enabled and verify the https variant
+ EasyMock.reset(zeppelinMaster);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "https", HTTPS_PORT, null);
+ }
+
+
+ // Same pattern for the ZEPPELINWS (websocket) service: ws/wss scheme chosen by zeppelin.ssl.
+ @Test
+ public void testZeppelinWsURL() throws Exception {
+ final String HTTP_PORT = "8787";
+ final String HTTPS_PORT = "8989";
+
+ final String[] HOSTNAMES = {"host1", "host4"};
+ final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
+ EasyMock.replay(cluster);
+
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "ws", HTTP_PORT, null);
+
+ // Re-stub with SSL enabled and verify the wss variant
+ EasyMock.reset(zeppelinMaster);
+ EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
+ EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
+ EasyMock.replay(zeppelinMaster);
+
+ // Run the test
+ validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "wss", HTTPS_PORT, null);
+ }
+
+
+ // Verifies one DRUID-COORDINATOR URL per DRUID_COORDINATOR host, port from druid.port.
+ @Test
+ public void testDruidCoordinatorURL() throws Exception {
+ final String PORT = "8787";
+
+ final String[] HOSTNAMES = {"host3", "host2"};
+ final List<String> druidCoordinatorHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent druidCoordinator = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(druidCoordinator.getHostNames()).andReturn(druidCoordinatorHosts).anyTimes();
+ EasyMock.expect(druidCoordinator.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+ EasyMock.replay(druidCoordinator);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_COORDINATOR")).andReturn(druidCoordinator).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("DRUID-COORDINATOR");
+ validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+ }
+
+
+ // Verifies one DRUID-BROKER URL per DRUID_BROKER host, port from druid.port.
+ @Test
+ public void testDruidBrokerURL() throws Exception {
+ final String PORT = "8181";
+
+ final String[] HOSTNAMES = {"host4", "host3"};
+ final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent druidBroker = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(druidBroker.getHostNames()).andReturn(druidHosts).anyTimes();
+ EasyMock.expect(druidBroker.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+ EasyMock.replay(druidBroker);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(druidBroker).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("DRUID-BROKER");
+ validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+ }
+
+
+ // Verifies one DRUID-ROUTER URL per DRUID_ROUTER host, port from druid.port.
+ @Test
+ public void testDruidRouterURL() throws Exception {
+ final String PORT = "8282";
+
+ final String[] HOSTNAMES = {"host5", "host7"};
+ final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent druidRouter = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(druidRouter.getHostNames()).andReturn(druidHosts).anyTimes();
+ EasyMock.expect(druidRouter.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+ EasyMock.replay(druidRouter);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_ROUTER")).andReturn(druidRouter).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("DRUID-ROUTER");
+ validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+ }
+
+
+ // Verifies one DRUID-OVERLORD URL per DRUID_OVERLORD host, port from druid.port.
+ @Test
+ public void testDruidOverlordURL() throws Exception {
+ final String PORT = "8383";
+
+ final String[] HOSTNAMES = {"host4", "host1"};
+ final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent druidOverlord = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(druidOverlord.getHostNames()).andReturn(druidHosts).anyTimes();
+ EasyMock.expect(druidOverlord.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
+ EasyMock.replay(druidOverlord);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_OVERLORD")).andReturn(druidOverlord).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("DRUID-OVERLORD");
+ validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+ }
+
+
+ // Verifies one SUPERSET URL per DRUID_SUPERSET host, port from SUPERSET_WEBSERVER_PORT.
+ @Test
+ public void testDruidSupersetURL() throws Exception {
+ final String PORT = "8484";
+
+ final String[] HOSTNAMES = {"host4", "host1"};
+ final List<String> druidHosts = Arrays.asList(HOSTNAMES);
+
+ AmbariComponent druidSuperset = EasyMock.createNiceMock(AmbariComponent.class);
+ EasyMock.expect(druidSuperset.getHostNames()).andReturn(druidHosts).anyTimes();
+ EasyMock.expect(druidSuperset.getConfigProperty("SUPERSET_WEBSERVER_PORT")).andReturn(PORT).anyTimes();
+ EasyMock.replay(druidSuperset);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_SUPERSET")).andReturn(druidSuperset).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("SUPERSET");
+ validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
+ }
+
+
+ // Verifies the fallback behavior when the cluster lacks the backing component: the creator
+ // returns the raw URL pattern with its {HOST}/{PORT}/{PATH} placeholders unresolved.
+ @Test
+ public void testMissingServiceComponentURL() throws Exception {
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();
+ EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(null).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Run the test
+ AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
+ List<String> urls = builder.create("DRUID-BROKER");
+ assertNotNull(urls);
+ assertEquals(1, urls.size());
+ assertEquals("http://{HOST}:{PORT}", urls.get(0));
+
+ urls = builder.create("HIVE");
+ assertNotNull(urls);
+ assertEquals(1, urls.size());
+ assertEquals("http://{HOST}:{PORT}/{PATH}", urls.get(0));
+ }
+
+
+ /**
+ * Convenience method for creating AmbariDynamicServiceURLCreator instances from different mapping configuration
+ * input sources.
+ *
+ * @param cluster The Ambari ServiceDiscovery Cluster model
+ * @param mappingConfig The mapping configuration (a String of XML content or a File), or null if the internal
+ * config should be used.
+ *
+ * @return An AmbariDynamicServiceURLCreator instance, capable of creating service URLs based on the specified
+ * cluster's configuration details; note this is null when mappingConfig is non-null but neither a
+ * String nor a File.
+ */
+ private static AmbariDynamicServiceURLCreator newURLCreator(AmbariCluster cluster, Object mappingConfig) throws Exception {
+ AmbariDynamicServiceURLCreator result = null;
+
+ if (mappingConfig == null) {
+ // No external config; the creator falls back to its internal mapping
+ result = new AmbariDynamicServiceURLCreator(cluster);
+ } else {
+ if (mappingConfig instanceof String) {
+ result = new AmbariDynamicServiceURLCreator(cluster, (String) mappingConfig);
+ } else if (mappingConfig instanceof File) {
+ result = new AmbariDynamicServiceURLCreator(cluster, (File) mappingConfig);
+ }
+ }
+
+ return result;
+ }
+
+
+ /**
+ * Validate the specified service URLs: each must be a well-formed URI with the expected scheme,
+ * port, and (optionally) path, and collectively the URLs must cover every expected host exactly once.
+ *
+ * @param urlsToValidate The URLs to validate
+ * @param hostNames The host names expected in the test URLs
+ * @param scheme The expected scheme for the URLs
+ * @param port The expected port for the URLs
+ * @param path The expected path for the URLs, or null to skip path validation
+ */
+ private static void validateServiceURLs(List<String> urlsToValidate,
+ String[] hostNames,
+ String scheme,
+ String port,
+ String path) throws MalformedURLException {
+
+ // Mutable copy so each matched host can be checked off
+ List<String> hostNamesToTest = new LinkedList<>(Arrays.asList(hostNames));
+ for (String url : urlsToValidate) {
+ URI test = null;
+ try {
+ // Make sure it's a valid URL
+ test = new URI(url);
+ } catch (URISyntaxException e) {
+ fail(e.getMessage());
+ }
+
+ // Validate the scheme
+ assertEquals(scheme, test.getScheme());
+
+ // Validate the port (URI.getPort() yields -1 when the URL has no explicit port)
+ assertEquals(port, String.valueOf(test.getPort()));
+
+ // If the expected path is not specified, don't validate it
+ if (path != null) {
+ assertEquals("/" + path, test.getPath());
+ }
+
+ // Validate the host name, removing it so duplicate hosts are not double-counted
+ assertTrue(hostNamesToTest.contains(test.getHost()));
+ hostNamesToTest.remove(test.getHost());
+ }
+ // Every expected host must have appeared in some URL
+ assertTrue(hostNamesToTest.isEmpty());
+ }
+
+
+ /**
+ * External service-discovery URL mapping configuration used by the *FromExternalMapping tests;
+ * defines url-pattern templates and the component/config properties that resolve their placeholders.
+ */
+ private static final String TEST_MAPPING_CONFIG =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+ "<service-discovery-url-mappings>\n" +
+ " <service name=\"NAMENODE\">\n" +
+ " <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"DFS_NAMENODE_RPC_ADDRESS\">\n" +
+ " <component>NAMENODE</component>\n" +
+ " <config-property>dfs.namenode.rpc-address</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "\n" +
+ " <service name=\"JOBTRACKER\">\n" +
+ " <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"YARN_RM_ADDRESS\">\n" +
+ " <component>RESOURCEMANAGER</component>\n" +
+ " <config-property>yarn.resourcemanager.address</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "\n" +
+ " <service name=\"WEBHDFS\">\n" +
+ " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"WEBHDFS_ADDRESS\">\n" +
+ " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
+ " <config-property>dfs.namenode.http-address</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "\n" +
+ " <service name=\"WEBHCAT\">\n" +
+ " <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"HOST\">\n" +
+ " <component>WEBHCAT_SERVER</component>\n" +
+ " <hostname/>\n" +
+ " </property>\n" +
+ " <property name=\"PORT\">\n" +
+ " <component>WEBHCAT_SERVER</component>\n" +
+ " <config-property>templeton.port</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "\n" +
+ " <service name=\"OOZIE\">\n" +
+ " <url-pattern>{OOZIE_ADDRESS}</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"OOZIE_ADDRESS\">\n" +
+ " <component>OOZIE_SERVER</component>\n" +
+ " <config-property>oozie.base.url</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "\n" +
+ " <service name=\"WEBHBASE\">\n" +
+ " <url-pattern>http://{HOST}:60080</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"HOST\">\n" +
+ " <component>HBASE_MASTER</component>\n" +
+ " <hostname/>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ " <service name=\"RESOURCEMANAGER\">\n" +
+ " <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"WEBAPP_HTTP_ADDRESS\">\n" +
+ " <component>RESOURCEMANAGER</component>\n" +
+ " <config-property>yarn.resourcemanager.webapp.address</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"WEBAPP_HTTPS_ADDRESS\">\n" +
+ " <component>RESOURCEMANAGER</component>\n" +
+ " <config-property>yarn.resourcemanager.webapp.https.address</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"HTTP_POLICY\">\n" +
+ " <component>RESOURCEMANAGER</component>\n" +
+ " <config-property>yarn.http.policy</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"SCHEME\">\n" +
+ " <config-property>\n" +
+ " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+ " <then>https</then>\n" +
+ " <else>http</else>\n" +
+ " </if>\n" +
+ " </config-property>\n" +
+ " </property>\n" +
+ " <property name=\"WEBAPP_ADDRESS\">\n" +
+ " <component>RESOURCEMANAGER</component>\n" +
+ " <config-property>\n" +
+ " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
+ " <then>WEBAPP_HTTPS_ADDRESS</then>\n" +
+ " <else>WEBAPP_HTTP_ADDRESS</else>\n" +
+ " </if>\n" +
+ " </config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ " <service name=\"HIVE\">\n" +
+ " <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"HOST\">\n" +
+ " <component>HIVE_SERVER</component>\n" +
+ " <hostname/>\n" +
+ " </property>\n" +
+ " <property name=\"USE_SSL\">\n" +
+ " <component>HIVE_SERVER</component>\n" +
+ " <config-property>hive.server2.use.SSL</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"PATH\">\n" +
+ " <component>HIVE_SERVER</component>\n" +
+ " <config-property>hive.server2.thrift.http.path</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"PORT\">\n" +
+ " <component>HIVE_SERVER</component>\n" +
+ " <config-property>hive.server2.thrift.http.port</config-property>\n" +
+ " </property>\n" +
+ " <property name=\"SCHEME\">\n" +
+ " <config-property>\n" +
+ " <if property=\"USE_SSL\" value=\"true\">\n" +
+ " <then>https</then>\n" +
+ " <else>http</else>\n" +
+ " </if>\n" +
+ " </config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "</service-discovery-url-mappings>\n";
+
+
+ /**
+ * Mapping configuration written to a temp file by testWebHdfsURLFromSystemPropertyOverride;
+ * its /webhdfs/OVERRIDE url-pattern proves the file-based override was actually applied.
+ */
+ private static final String OVERRIDE_MAPPING_FILE_CONTENTS =
+ "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
+ "<service-discovery-url-mappings>\n" +
+ " <service name=\"WEBHDFS\">\n" +
+ " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs/OVERRIDE</url-pattern>\n" +
+ " <properties>\n" +
+ " <property name=\"WEBHDFS_ADDRESS\">\n" +
+ " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
+ " <config-property>dfs.namenode.http-address</config-property>\n" +
+ " </property>\n" +
+ " </properties>\n" +
+ " </service>\n" +
+ "</service-discovery-url-mappings>\n";
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
deleted file mode 100644
index 4e938d2..0000000
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package org.apache.hadoop.gateway.websockets;
-
-import javax.websocket.CloseReason;
-import javax.websocket.Endpoint;
-import javax.websocket.EndpointConfig;
-import javax.websocket.MessageHandler;
-import javax.websocket.Session;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-/**
- * A Websocket client with callback which is not annotation based.
- * This handler accepts String and binary messages.
- * @since 0.14.0
- */
-public class ProxyInboundClient extends Endpoint {
-
- /**
- * Callback invoked for connection open/close/error and message events on this socket.
- */
- private MessageEventCallback callback;
-
- protected Session session;
- protected EndpointConfig config;
-
-
- public ProxyInboundClient(final MessageEventCallback callback) {
- super();
- this.callback = callback;
- }
-
- /**
- * Called by the container when a new conversation begins; stores the session
- * and registers whole-message handlers for binary and text data.
- *
- * @param backendSession the session that has just been activated.
- * @param config the configuration used to configure this endpoint.
- */
- @Override
- public void onOpen(final javax.websocket.Session backendSession, final EndpointConfig config) {
- this.session = backendSession;
- this.config = config;
-
- /* Set the max message size */
- session.setMaxBinaryMessageBufferSize(Integer.MAX_VALUE);
- session.setMaxTextMessageBufferSize(Integer.MAX_VALUE);
-
- /* Add message handler for binary data */
- session.addMessageHandler(new MessageHandler.Whole<byte[]>() {
-
- /**
- * Called when the message has been fully received.
- *
- * @param message the message data.
- */
- @Override
- public void onMessage(final byte[] message) {
- callback.onMessageBinary(message, true, session);
- }
-
- });
-
- /* Add message handler for text data */
- session.addMessageHandler(new MessageHandler.Whole<String>() {
-
- /**
- * Called when the message has been fully received.
- *
- * @param message the message data.
- */
- @Override
- public void onMessage(final String message) {
- callback.onMessageText(message, session);
- }
-
- });
-
- callback.onConnectionOpen(backendSession);
- }
-
- @Override
- public void onClose(final javax.websocket.Session backendSession, final CloseReason closeReason) {
- callback.onConnectionClose(closeReason);
- this.session = null;
- }
-
- @Override
- public void onError(final javax.websocket.Session backendSession, final Throwable cause) {
- callback.onError(cause);
- this.session = null;
- }
-
-}
[09/23] knox git commit: KNOX-1070 - Drop support for Java 7 (Rick
Kellogg via Sandeep More)
Posted by mo...@apache.org.
KNOX-1070 - Drop support for Java 7 (Rick Kellogg via Sandeep More)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/d762ed3f
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/d762ed3f
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/d762ed3f
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: d762ed3fd9193ad6e837fec1747d18f334872951
Parents: 485769b
Author: Sandeep More <mo...@apache.org>
Authored: Thu Oct 5 09:33:33 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Thu Oct 5 09:33:33 2017 -0400
----------------------------------------------------------------------
pom.xml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/d762ed3f/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index fb00d71..e314415 100644
--- a/pom.xml
+++ b/pom.xml
@@ -291,8 +291,8 @@
<version>3.3</version>
<configuration>
<compilerId>javac</compilerId>
- <source>1.7</source>
- <target>1.7</target>
+ <source>1.8</source>
+ <target>1.8</target>
<debug>true</debug>
</configuration>
<dependencies>
@@ -351,9 +351,9 @@
<requireMavenVersion>
<version>[3.0.2,)</version>
</requireMavenVersion>
- <!--<requireJavaVersion>
+ <requireJavaVersion>
<version>[1.8,)</version>
- </requireJavaVersion> -->
+ </requireJavaVersion>
</rules>
</configuration>
</execution>
[16/23] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
index 224eb1c,0000000..b73b1b7
mode 100644,000000..100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@@ -1,307 -1,0 +1,510 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.knoxtoken;
+
+import org.apache.knox.gateway.service.knoxtoken.TokenResource;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.services.security.token.impl.JWTToken;
++import org.apache.knox.gateway.security.PrimaryPrincipal;
++
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Response;
+
+import static org.junit.Assert.*;
+
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.Principal;
++import java.security.cert.X509Certificate;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * Some tests for the token service
+ */
+public class TokenServiceResourceTest {
+
+ protected static RSAPublicKey publicKey;
+ protected static RSAPrivateKey privateKey;
+
+ @BeforeClass
+ public static void setup() throws Exception, NoSuchAlgorithmException {
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+ kpg.initialize(1024);
+ KeyPair KPair = kpg.generateKeyPair();
+
+ publicKey = (RSAPublicKey) KPair.getPublic();
+ privateKey = (RSAPrivateKey) KPair.getPrivate();
+ }
+
+ @Test
+ public void testTokenService() throws Exception {
+ Assert.assertTrue(true);
+ }
+
+ @Test
+ public void testClientData() throws Exception {
+ TokenResource tr = new TokenResource();
+
+ Map<String,Object> clientDataMap = new HashMap<>();
+ tr.addClientDataToMap("cookie.name=hadoop-jwt,test=value".split(","), clientDataMap);
+ Assert.assertTrue(clientDataMap.size() == 2);
+
+ clientDataMap = new HashMap<>();
+ tr.addClientDataToMap("cookie.name=hadoop-jwt".split(","), clientDataMap);
+ Assert.assertTrue(clientDataMap.size() == 1);
+
+ clientDataMap = new HashMap<>();
+ tr.addClientDataToMap("".split(","), clientDataMap);
+ Assert.assertTrue(clientDataMap.size() == 0);
+ }
+
+ @Test
+ public void testGetToken() throws Exception {
+ TokenResource tr = new TokenResource();
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ //tr.context = context;
+ // tr.init();
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response);
+
+ tr.request = request;
+ tr.response = response;
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(200, retResponse.getStatus());
+
+ // Parse the response
+ String retString = writer.toString();
+ String accessToken = getTagValue(retString, "access_token");
+ assertNotNull(accessToken);
+ String expiry = getTagValue(retString, "expires_in");
+ assertNotNull(expiry);
+
+ // Verify the token
+ JWTToken parsedToken = new JWTToken(accessToken);
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+ }
+
+ @Test
+ public void testAudiences() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn("recipient1,recipient2");
+ EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response);
+
+ TokenResource tr = new TokenResource();
+ tr.request = request;
+ tr.response = response;
+ tr.context = context;
+ tr.init();
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(200, retResponse.getStatus());
+
+ // Parse the response
+ String retString = writer.toString();
+ String accessToken = getTagValue(retString, "access_token");
+ assertNotNull(accessToken);
+ String expiry = getTagValue(retString, "expires_in");
+ assertNotNull(expiry);
+
+ // Verify the token
+ JWTToken parsedToken = new JWTToken(accessToken);
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+
+ // Verify the audiences
+ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+ assertEquals(2, audiences.size());
+ assertTrue(audiences.contains("recipient1"));
+ assertTrue(audiences.contains("recipient2"));
+ }
+
++ @Test
++ public void testAudiencesWhitespace() throws Exception {
++
++ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
++ EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn(" recipient1, recipient2 ");
++ EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
++ Principal principal = EasyMock.createNiceMock(Principal.class);
++ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
++ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
++
++ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
++ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
++
++ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
++ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
++
++ StringWriter writer = new StringWriter();
++ PrintWriter printWriter = new PrintWriter(writer);
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ EasyMock.expect(response.getWriter()).andReturn(printWriter);
++
++ EasyMock.replay(principal, services, context, request, response);
++
++ TokenResource tr = new TokenResource();
++ tr.request = request;
++ tr.response = response;
++ tr.context = context;
++ tr.init();
++
++ // Issue a token
++ Response retResponse = tr.doGet();
++
++ assertEquals(200, retResponse.getStatus());
++
++ // Parse the response
++ String retString = writer.toString();
++ String accessToken = getTagValue(retString, "access_token");
++ assertNotNull(accessToken);
++ String expiry = getTagValue(retString, "expires_in");
++ assertNotNull(expiry);
++
++ // Verify the token
++ JWTToken parsedToken = new JWTToken(accessToken);
++ assertEquals("alice", parsedToken.getSubject());
++ assertTrue(authority.verifyToken(parsedToken));
++
++ // Verify the audiences
++ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
++ assertEquals(2, audiences.size());
++ assertTrue(audiences.contains("recipient1"));
++ assertTrue(audiences.contains("recipient2"));
++ }
++
++ @Test
++ public void testValidClientCert() throws Exception {
++
++ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
++ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
++ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
++ X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
++ EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
++ ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
++ certArrayList.add(trustedCertMock);
++ X509Certificate[] certs = {};
++ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
++
++ Principal principal = EasyMock.createNiceMock(Principal.class);
++ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
++ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
++
++ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
++ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
++
++ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
++ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
++
++ StringWriter writer = new StringWriter();
++ PrintWriter printWriter = new PrintWriter(writer);
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ EasyMock.expect(response.getWriter()).andReturn(printWriter);
++
++ EasyMock.replay(principal, services, context, request, response, trustedCertMock);
++
++ TokenResource tr = new TokenResource();
++ tr.request = request;
++ tr.response = response;
++ tr.context = context;
++ tr.init();
++
++ // Issue a token
++ Response retResponse = tr.doGet();
++
++ assertEquals(200, retResponse.getStatus());
++
++ // Parse the response
++ String retString = writer.toString();
++ String accessToken = getTagValue(retString, "access_token");
++ assertNotNull(accessToken);
++ String expiry = getTagValue(retString, "expires_in");
++ assertNotNull(expiry);
++
++ // Verify the token
++ JWTToken parsedToken = new JWTToken(accessToken);
++ assertEquals("alice", parsedToken.getSubject());
++ assertTrue(authority.verifyToken(parsedToken));
++ }
++
++ @Test
++ public void testValidClientCertWrongUser() throws Exception {
++
++ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
++ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
++ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
++ X509Certificate trustedCertMock = EasyMock.createMock(X509Certificate.class);
++ EasyMock.expect(trustedCertMock.getSubjectDN()).andReturn(new PrimaryPrincipal("CN=localhost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US")).anyTimes();
++ ArrayList<X509Certificate> certArrayList = new ArrayList<X509Certificate>();
++ certArrayList.add(trustedCertMock);
++ X509Certificate[] certs = {};
++ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(certArrayList.toArray(certs)).anyTimes();
++
++ Principal principal = EasyMock.createNiceMock(Principal.class);
++ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
++ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
++
++ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
++ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
++
++ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
++ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
++
++ StringWriter writer = new StringWriter();
++ PrintWriter printWriter = new PrintWriter(writer);
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ EasyMock.expect(response.getWriter()).andReturn(printWriter);
++
++ EasyMock.replay(principal, services, context, request, response, trustedCertMock);
++
++ TokenResource tr = new TokenResource();
++ tr.request = request;
++ tr.response = response;
++ tr.context = context;
++ tr.init();
++
++ // Issue a token
++ Response retResponse = tr.doGet();
++
++ assertEquals(403, retResponse.getStatus());
++ }
++
++ @Test
++ public void testMissingClientCert() throws Exception {
++
++ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
++ EasyMock.expect(context.getInitParameter("knox.token.client.cert.required")).andReturn("true");
++ EasyMock.expect(context.getInitParameter("knox.token.allowed.principals")).andReturn("CN=remotehost, OU=Test, O=Hadoop, L=Test, ST=Test, C=US");
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
++ EasyMock.expect(request.getAttribute("javax.servlet.request.X509Certificate")).andReturn(null).anyTimes();
++
++ Principal principal = EasyMock.createNiceMock(Principal.class);
++ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
++ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
++
++ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
++ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
++
++ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
++ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
++
++ StringWriter writer = new StringWriter();
++ PrintWriter printWriter = new PrintWriter(writer);
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ EasyMock.expect(response.getWriter()).andReturn(printWriter);
++
++ EasyMock.replay(principal, services, context, request, response);
++
++ TokenResource tr = new TokenResource();
++ tr.request = request;
++ tr.response = response;
++ tr.context = context;
++ tr.init();
++
++ // Issue a token
++ Response retResponse = tr.doGet();
++
++ assertEquals(403, retResponse.getStatus());
++ }
++
+ private String getTagValue(String token, String tagName) {
+ String searchString = tagName + "\":";
+ String value = token.substring(token.indexOf(searchString) + searchString.length());
+ if (value.startsWith("\"")) {
+ value = value.substring(1);
+ }
+ if (value.contains("\"")) {
+ return value.substring(0, value.indexOf("\""));
+ } else if (value.contains(",")) {
+ return value.substring(0, value.indexOf(","));
+ } else {
+ return value.substring(0, value.length() - 1);
+ }
+ }
+
+ private static class TestJWTokenAuthority implements JWTokenAuthority {
+
+ private RSAPublicKey publicKey;
+ private RSAPrivateKey privateKey;
+
+ public TestJWTokenAuthority(RSAPublicKey publicKey, RSAPrivateKey privateKey) {
+ this.publicKey = publicKey;
+ this.privateKey = privateKey;
+ }
+
+ @Override
+ public JWT issueToken(Subject subject, String algorithm)
+ throws TokenServiceException {
+ Principal p = (Principal) subject.getPrincipals().toArray()[0];
+ return issueToken(p, algorithm);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String algorithm)
+ throws TokenServiceException {
+ return issueToken(p, null, algorithm);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm)
+ throws TokenServiceException {
+ return issueToken(p, audience, algorithm, -1);
+ }
+
+ @Override
+ public boolean verifyToken(JWT token) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier(publicKey);
+ return token.verify(verifier);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm,
+ long expires) throws TokenServiceException {
+ ArrayList<String> audiences = null;
+ if (audience != null) {
+ audiences = new ArrayList<String>();
+ audiences.add(audience);
+ }
+ return issueToken(p, audiences, algorithm, expires);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, List<String> audiences, String algorithm,
+ long expires) throws TokenServiceException {
+ String[] claimArray = new String[4];
+ claimArray[0] = "KNOXSSO";
+ claimArray[1] = p.getName();
+ claimArray[2] = null;
+ if (expires == -1) {
+ claimArray[3] = null;
+ } else {
+ claimArray[3] = String.valueOf(expires);
+ }
+
+ JWTToken token = null;
+ if ("RS256".equals(algorithm)) {
+ token = new JWTToken("RS256", claimArray, audiences);
+ JWSSigner signer = new RSASSASigner(privateKey);
+ token.sign(signer);
+ } else {
+ throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
+ }
+
+ return token;
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String algorithm, long expiry)
+ throws TokenServiceException {
+ return issueToken(p, Collections.<String>emptyList(), algorithm, expiry);
+ }
+
+ @Override
+ public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier(publicKey);
+ return token.verify(verifier);
+ }
+
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-shell-release/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-util-urltemplate/src/main/java/org/apache/knox/gateway/util/urltemplate/Parser.java
----------------------------------------------------------------------
diff --cc gateway-util-urltemplate/src/main/java/org/apache/knox/gateway/util/urltemplate/Parser.java
index 47ed00c,0000000..1d58978
mode 100644,000000..100644
--- a/gateway-util-urltemplate/src/main/java/org/apache/knox/gateway/util/urltemplate/Parser.java
+++ b/gateway-util-urltemplate/src/main/java/org/apache/knox/gateway/util/urltemplate/Parser.java
@@@ -1,345 -1,0 +1,349 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.util.urltemplate;
+
+import org.apache.knox.gateway.i18n.resources.ResourcesFactory;
+
+import java.net.URISyntaxException;
+import java.util.StringTokenizer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+//NOTE: Instances Not thread safe but reusable. Static parse method is thread safe.
+//NOTE: Ignores matrix parameters at this point.
+public class Parser {
+
+ /*
+ ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?
+ 12 3 4 5 6 7 8 9
+
+ The numbers in the second line above are only to assist readability;
+ they indicate the reference points for each subexpression (i.e., each
+ paired parenthesis). We refer to the value matched for subexpression
+ <n> as $<n>. For example, matching the above expression to
+
+ http://www.ics.uci.edu/pub/ietf/uri/#Related
+
+ results in the following subexpression matches:
+
+ $1 = http:
+ $2 = http
+ $3 = //www.ics.uci.edu
+ $4 = www.ics.uci.edu
+ $5 = /pub/ietf/uri/
+ $6 = <undefined>
+ $7 = <undefined>
+ $8 = #Related
+ $9 = Related
+
+ where <undefined> indicates that the component is not present, as is
+ the case for the query component in the above example. Therefore, we
+ can determine the value of the five components as
+
+ scheme = $2
+ authority = $4
+ path = $5
+ query = $7
+ fragment = $9
+ */
+
+ private static final Resources RES = ResourcesFactory.get( Resources.class );
+
+ public static final char TEMPLATE_OPEN_MARKUP = '{';
+ public static final char TEMPLATE_CLOSE_MARKUP = '}';
+ public static final char NAME_PATTERN_SEPARATOR = '=';
+
+ private static final int MATCH_GROUP_SCHEME = 1;
+ private static final int MATCH_GROUP_SCHEME_NAKED = 2;
+ private static final int MATCH_GROUP_AUTHORITY = 3;
+ private static final int MATCH_GROUP_AUTHORITY_NAKED = 4;
+ private static final int MATCH_GROUP_PATH = 5;
+ private static final int MATCH_GROUP_QUERY = 6;
+ private static final int MATCH_GROUP_QUERY_NAKED = 7;
+ private static final int MATCH_GROUP_FRAGMENT = 8;
+ private static final int MATCH_GROUP_FRAGMENT_NAKED = 9;
+
+ private static Pattern PATTERN = Pattern.compile( "^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?" );
+
+ @Deprecated
+ public static final Template parse( String template ) throws URISyntaxException {
+ return Parser.parseTemplate( template );
+ }
+
+ public static final Template parseTemplate( final String template ) throws URISyntaxException {
+ Builder builder = new Builder( template );
+ return parseInternal( builder );
+ }
+
+ public static final Template parseLiteral( final String literal ) throws URISyntaxException {
+ Builder builder = new Builder( literal );
+ builder.setLiteral( true );
+ return parseInternal( builder );
+ }
+
+ private static final Template parseInternal( final Builder builder ) throws URISyntaxException {
+ String original = builder.getOriginal();
+ builder.setHasScheme( false );
+ builder.setHasAuthority( false ); // Assume no until found otherwise. If true, will cause // in output URL.
+ builder.setIsAuthorityOnly( false );
+ builder.setIsAbsolute( false ); // Assume relative until found otherwise. If true, will cause leading / in output URL.
+ builder.setIsDirectory( false ); // Assume a file path until found otherwise. If true, will cause trailing / in output URL.
+ builder.setHasQuery( false ); // Assume no ? until found otherwise. If true, will cause ? in output URL.
+ builder.setHasFragment( false ); // Assume no # until found otherwise. If true, will cause # in output URL.
+ Matcher match = PATTERN.matcher( original );
+ if( match.matches() ) {
+ consumeSchemeMatch( builder, match );
+ consumeAuthorityMatch( builder, match );
+ consumePathMatch( builder, match );
+ consumeQueryMatch( builder, match );
+ consumeFragmentMatch( builder, match );
+ fixNakedAuthority( builder );
+ } else {
+ throw new URISyntaxException( original, RES.parseTemplateFailureReason( original ) );
+ }
+ return builder.build();
+ }
+
+ private static final void fixNakedAuthority( final Builder builder ) {
+ if( builder.getHasScheme() &&
+ !builder.getHasAuthority() &&
+ !builder.getIsAbsolute() &&
+ !builder.getIsDirectory() &&
+ ( builder.getPath().size() == 1 ) &&
+ !builder.getHasQuery() &&
+ !builder.getHasFragment() ) {
+ final Scheme scheme = builder.getScheme();
+ builder.setHasScheme( false );
+ builder.setHost( makeTokenSingular( scheme.getToken() ) );
+ Path path = builder.getPath().remove( 0 );
+ builder.setPort( makeTokenSingular( path.getToken() ) );
+ builder.setIsAuthorityOnly( true );
+ }
+ }
+
+ private static final Token makeTokenSingular( Token token ) {
+ final String effectivePattern = token.getEffectivePattern();
+ if( Segment.GLOB_PATTERN.equals( effectivePattern ) ) {
+ token = new Token( token.getParameterName(), token.getOriginalPattern(), Segment.STAR_PATTERN, token.isLiteral() );
+ }
+ return token;
+ }
+
+// private String makePatternSingular( String pattern ) {
+// if( Segment.GLOB_PATTERN.equals( pattern ) ) {
+// pattern = Segment.STAR_PATTERN;
+// }
+// return pattern;
+// }
+
+ private static void consumeSchemeMatch( final Builder builder, final Matcher match ) {
+ if( match.group( MATCH_GROUP_SCHEME ) != null ) {
+ builder.setHasScheme( true );
+ consumeSchemeToken( builder, match.group( MATCH_GROUP_SCHEME_NAKED ) );
+ }
+ }
+
+ private static void consumeSchemeToken( final Builder builder, final String token ) {
+ if( token != null ) {
+ Token t = parseTemplateToken( builder, token, Segment.STAR_PATTERN );
+ builder.setScheme( t );
+ }
+ }
+
+ private static void consumeAuthorityMatch( final Builder builder, final Matcher match ) {
+ if( match.group( MATCH_GROUP_AUTHORITY ) != null ) {
+ builder.setHasAuthority( true );
+ consumeAuthorityToken( builder, match.group( MATCH_GROUP_AUTHORITY_NAKED ) );
+ }
+ }
+
+ private static void consumeAuthorityToken( final Builder builder, final String token ) {
+ if( token != null ) {
+ Token paramPattern;
+ String[] usernamePassword=null, hostPort;
+ String[] userAddr = split( token, '@' );
+ if( userAddr.length == 1 ) {
+ hostPort = split( userAddr[ 0 ], ':' );
+ } else {
+ usernamePassword = split( userAddr[ 0 ], ':' );
+ hostPort = split( userAddr[ 1 ], ':' );
+ }
+ if( usernamePassword != null ) {
+ if( usernamePassword[ 0 ].length() > 0 ) {
+ paramPattern = makeTokenSingular( parseTemplateToken( builder, usernamePassword[ 0 ], Segment.STAR_PATTERN ) );
+ builder.setUsername( paramPattern );
+ }
+ if( usernamePassword.length > 1 && usernamePassword[ 1 ].length() > 0 ) {
+ paramPattern = makeTokenSingular( parseTemplateToken( builder, usernamePassword[ 1 ], Segment.STAR_PATTERN ) );
+ builder.setPassword( paramPattern );
+ }
+ }
+ if( hostPort[ 0 ].length() > 0 ) {
+ paramPattern = makeTokenSingular( parseTemplateToken( builder, hostPort[ 0 ], Segment.STAR_PATTERN ) );
+ builder.setHost( paramPattern );
+ }
+ if( hostPort.length > 1 && hostPort[ 1 ].length() > 0 ) {
+ paramPattern = makeTokenSingular( parseTemplateToken( builder, hostPort[ 1 ], Segment.STAR_PATTERN ) );
+ builder.setPort( paramPattern );
+ }
+ }
+ }
+
+ private static void consumePathMatch( final Builder builder, final Matcher match ) {
+ String path = match.group( MATCH_GROUP_PATH );
+ if( path != null ) {
+ builder.setIsAbsolute( path.startsWith( "/" ) );
+ builder.setIsDirectory( path.endsWith( "/" ) );
+ consumePathToken( builder, path );
+ }
+ }
+
+ private static final void consumePathToken( final Builder builder, final String token ) {
+ if( token != null ) {
+ final StringTokenizer tokenizer = new StringTokenizer( token, "/" );
+ while( tokenizer.hasMoreTokens() ) {
+ consumePathSegment( builder, tokenizer.nextToken() );
+ }
+ }
+ }
+
+ private static final void consumePathSegment( final Builder builder, final String token ) {
+ if( token != null ) {
+ final Token t = parseTemplateToken( builder, token, Segment.GLOB_PATTERN );
+ builder.addPath( t );
+ }
+ }
+
+ private static void consumeQueryMatch( final Builder builder, Matcher match ) {
+ if( match.group( MATCH_GROUP_QUERY ) != null ) {
+ builder.setHasQuery( true );
+ consumeQueryToken( builder, match.group( MATCH_GROUP_QUERY_NAKED ) );
+ }
+ }
+
+ private static void consumeQueryToken( final Builder builder, String token ) {
+ if( token != null ) {
- StringTokenizer tokenizer = new StringTokenizer( token, "?&" );
- while( tokenizer.hasMoreTokens() ) {
- consumeQuerySegment( builder, tokenizer.nextToken() );
++ //add "&" as a delimiter
++ String[] tokens = token.split("(&|\\?|&)");
++ if (tokens != null){
++ for (String nextToken : tokens){
++ consumeQuerySegment(builder,nextToken);
++ }
+ }
++
+ }
+ }
+
+ private static void consumeQuerySegment( final Builder builder, String token ) {
+ if( token != null && token.length() > 0 ) {
+ // Shorthand format {queryParam} == queryParam={queryParam=*}
+ if( TEMPLATE_OPEN_MARKUP == token.charAt( 0 ) ) {
+ Token paramPattern = parseTemplateToken( builder, token, Segment.GLOB_PATTERN );
+ String paramName = paramPattern.parameterName;
+ if( paramPattern.originalPattern == null ) {
+ builder.addQuery( paramName, new Token( paramName, null, Segment.GLOB_PATTERN, builder.isLiteral() ) );
+// if( Segment.STAR_PATTERN.equals( paramName ) || Segment.GLOB_PATTERN.equals( paramName ) ) {
+// builder.addQuery( paramName, new Token( paramName, null, Segment.GLOB_PATTERN ) );
+// } else {
+// builder.addQuery( paramName, new Token( paramName, null, Segment.GLOB_PATTERN ) );
+// }
+ } else {
+ builder.addQuery( paramName, new Token( paramName, paramPattern.originalPattern, builder.isLiteral() ) );
+ }
+ } else {
+ String nameValue[] = split( token, '=' );
+ if( nameValue.length == 1 ) {
+ String queryName = nameValue[ 0 ];
+ builder.addQuery( queryName, new Token( Segment.ANONYMOUS_PARAM, null, builder.isLiteral() ) );
+ } else {
+ String queryName = nameValue[ 0 ];
+ Token paramPattern = parseTemplateToken( builder, nameValue[ 1 ], Segment.GLOB_PATTERN );
+ builder.addQuery( queryName, paramPattern );
+ }
+ }
+ }
+ }
+
+ private static void consumeFragmentMatch( final Builder builder, Matcher match ) {
+ if( match.group( MATCH_GROUP_FRAGMENT ) != null ) {
+ builder.setHasFragment( true );
+ consumeFragmentToken( builder, match.group( MATCH_GROUP_FRAGMENT_NAKED ) );
+ }
+ }
+
+ private static void consumeFragmentToken( final Builder builder, String token ) {
+ if( token != null && token.length() > 0 ) {
+ Token t = parseTemplateToken( builder, token, Segment.STAR_PATTERN );
+ builder.setFragment( t );
+ }
+ }
+
+ static final Token parseTemplateToken( final Builder builder, final String s, final String defaultEffectivePattern ) {
+ String paramName, actualPattern, effectivePattern;
+ final int l = s.length();
+ // If the token isn't the empty string, then
+ if( l > 0 && !builder.isLiteral() ) {
+ final int b = ( s.charAt( 0 ) == TEMPLATE_OPEN_MARKUP ? 1 : -1 );
+ final int e = ( s.charAt( l-1 ) == TEMPLATE_CLOSE_MARKUP ? l-1 : -1 );
+ // If this is a parameter template, ie {...}
+ if( ( b > 0 ) && ( e > 0 ) && ( e > b ) ) {
+ final int i = s.indexOf( NAME_PATTERN_SEPARATOR, b );
+ // If this is an anonymous template
+ if( i < 0 ) {
+ paramName = s.substring( b, e );
+ actualPattern = null;
+ if( Segment.GLOB_PATTERN.equals( paramName ) ) {
+ effectivePattern = Segment.GLOB_PATTERN;
+ } else {
+ effectivePattern = defaultEffectivePattern;
+ }
+ // Otherwise populate the NVP.
+ } else {
+ paramName = s.substring( b, i );
+ actualPattern = s.substring( i+1, e );
+ effectivePattern = actualPattern;
+ }
+ // Otherwise it is just a pattern.
+ } else {
+ paramName = Segment.ANONYMOUS_PARAM;
+ actualPattern = s;
+ effectivePattern = actualPattern;
+ }
+ // Otherwise the token has no value.
+ } else {
+ paramName = Segment.ANONYMOUS_PARAM;
+ actualPattern = s;
+ effectivePattern = actualPattern;
+ }
+ final Token token = new Token( paramName, actualPattern, effectivePattern, builder.isLiteral() );
+ return token;
+ }
+
+ // Using this because String.split is very inefficient.
+ private static String[] split( String s, char d ) {
+ String[] a;
+ int i = s.indexOf( d );
+ if( i < 0 ) {
+ a = new String[]{ s };
+ } else {
+ a = new String[]{ s.substring( 0, i ), s.substring( i + 1 ) };
+ }
+ return a;
+ }
+
+}
[17/23] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyWebSocketAdapter.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyWebSocketAdapter.java
index 850157e,0000000..a678a72
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyWebSocketAdapter.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/websockets/ProxyWebSocketAdapter.java
@@@ -1,276 -1,0 +1,289 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.websockets;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.concurrent.ExecutorService;
+
++import javax.websocket.ClientEndpointConfig;
+import javax.websocket.CloseReason;
+import javax.websocket.ContainerProvider;
+import javax.websocket.DeploymentException;
+import javax.websocket.WebSocketContainer;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.eclipse.jetty.io.RuntimeIOException;
+import org.eclipse.jetty.util.component.LifeCycle;
+import org.eclipse.jetty.websocket.api.BatchMode;
+import org.eclipse.jetty.websocket.api.RemoteEndpoint;
+import org.eclipse.jetty.websocket.api.Session;
+import org.eclipse.jetty.websocket.api.StatusCode;
+import org.eclipse.jetty.websocket.api.WebSocketAdapter;
+
+/**
+ * Handles outbound/inbound Websocket connections and sessions.
+ *
+ * @since 0.10
+ */
+public class ProxyWebSocketAdapter extends WebSocketAdapter {
+
+ private static final WebsocketLogMessages LOG = MessagesFactory
+ .get(WebsocketLogMessages.class);
+
+ /* URI for the backend */
+ private final URI backend;
+
+ /* Session between the frontend (browser) and Knox */
+ private Session frontendSession;
+
+ /* Session between the backend (outbound) and Knox */
+ private javax.websocket.Session backendSession;
+
+ private WebSocketContainer container;
+
+ private ExecutorService pool;
+
+ /**
++ * Used to transmit headers from browser to backend server.
++ * @since 0.14
++ */
++ private ClientEndpointConfig clientConfig;
++
++ /**
+ * Create an instance
+ */
+ public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool) {
++ this(backend, pool, null);
++ }
++
++ public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool, final ClientEndpointConfig clientConfig) {
+ super();
+ this.backend = backend;
+ this.pool = pool;
++ this.clientConfig = clientConfig;
+ }
+
+ @Override
+ public void onWebSocketConnect(final Session frontEndSession) {
+
+ /*
+ * Let's connect to the backend, this is where the Backend-to-frontend
+ * plumbing takes place
+ */
+ container = ContainerProvider.getWebSocketContainer();
- final ProxyInboundSocket backendSocket = new ProxyInboundSocket(
- getMessageCallback());
++
++ final ProxyInboundClient backendSocket = new ProxyInboundClient(getMessageCallback());
+
+ /* build the configuration */
+
+ /* Attempt Connect */
+ try {
- backendSession = container.connectToServer(backendSocket, backend);
++ backendSession = container.connectToServer(backendSocket, clientConfig, backend);
++
+ LOG.onConnectionOpen(backend.toString());
+
+ } catch (DeploymentException e) {
+ LOG.connectionFailed(e);
+ throw new RuntimeException(e);
+ } catch (IOException e) {
+ LOG.connectionFailed(e);
+ throw new RuntimeIOException(e);
+ }
+
+ super.onWebSocketConnect(frontEndSession);
+ this.frontendSession = frontEndSession;
+
+ }
+
+ @Override
+ public void onWebSocketBinary(final byte[] payload, final int offset,
+ final int length) {
+
+ if (isNotConnected()) {
+ return;
+ }
+
+ throw new UnsupportedOperationException(
+ "Websocket support for binary messages is not supported at this time.");
+ }
+
+ @Override
+ public void onWebSocketText(final String message) {
+
+ if (isNotConnected()) {
+ return;
+ }
+
+ LOG.logMessage("[From Frontend --->]" + message);
+
+ /* Proxy message to backend */
+ try {
+ backendSession.getBasicRemote().sendText(message);
+
+ } catch (IOException e) {
+ LOG.connectionFailed(e);
+ }
+
+ }
+
+ @Override
+ public void onWebSocketClose(int statusCode, String reason) {
+ super.onWebSocketClose(statusCode, reason);
+
+ /* do the cleaning business in seperate thread so we don't block */
+ pool.execute(new Runnable() {
+ @Override
+ public void run() {
+ closeQuietly();
+ }
+ });
+
+ LOG.onConnectionClose(backend.toString());
+
+ }
+
+ @Override
+ public void onWebSocketError(final Throwable t) {
+ cleanupOnError(t);
+ }
+
+ /**
+ * Cleanup sessions
+ */
+ private void cleanupOnError(final Throwable t) {
+
+ LOG.onError(t.toString());
+ if (t.toString().contains("exceeds maximum size")) {
+ if(frontendSession != null && !frontendSession.isOpen()) {
+ frontendSession.close(StatusCode.MESSAGE_TOO_LARGE, t.getMessage());
+ }
+ }
+
+ else {
+ if(frontendSession != null && !frontendSession.isOpen()) {
+ frontendSession.close(StatusCode.SERVER_ERROR, t.getMessage());
+ }
+
+ /* do the cleaning business in seperate thread so we don't block */
+ pool.execute(new Runnable() {
+ @Override
+ public void run() {
+ closeQuietly();
+ }
+ });
+
+ }
+ }
+
+ private MessageEventCallback getMessageCallback() {
+
+ return new MessageEventCallback() {
+
+ @Override
+ public void doCallback(String message) {
+ /* do nothing */
+
+ }
+
+ @Override
+ public void onConnectionOpen(Object session) {
+ /* do nothing */
+
+ }
+
+ @Override
+ public void onConnectionClose(final CloseReason reason) {
+ try {
+ frontendSession.close(reason.getCloseCode().getCode(),
+ reason.getReasonPhrase());
+ } finally {
+
+ /* do the cleaning business in seperate thread so we don't block */
+ pool.execute(new Runnable() {
+ @Override
+ public void run() {
+ closeQuietly();
+ }
+ });
+
+ }
+
+ }
+
+ @Override
+ public void onError(Throwable cause) {
+ cleanupOnError(cause);
+ }
+
+ @Override
+ public void onMessageText(String message, Object session) {
+ final RemoteEndpoint remote = getRemote();
+
+ LOG.logMessage("[From Backend <---]" + message);
+
+ /* Proxy message to frontend */
+ try {
+ remote.sendString(message);
+ if (remote.getBatchMode() == BatchMode.ON) {
+ remote.flush();
+ }
+ } catch (IOException e) {
+ LOG.connectionFailed(e);
+ throw new RuntimeIOException(e);
+ }
+
+ }
+
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+ throw new UnsupportedOperationException(
+ "Websocket support for binary messages is not supported at this time.");
+
+ }
+
+ };
+
+ }
+
+ private void closeQuietly() {
+
+ try {
+ if(backendSession != null && !backendSession.isOpen()) {
+ backendSession.close();
+ }
+ } catch (IOException e) {
+ LOG.connectionFailed(e);
+ }
+
+ if (container instanceof LifeCycle) {
+ try {
+ ((LifeCycle) container).stop();
+ } catch (Exception e) {
+ LOG.connectionFailed(e);
+ }
+ }
+
+ if(frontendSession != null && !frontendSession.isOpen()) {
+ frontendSession.close();
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --cc gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index b713491,0000000..b5558fd
mode 100644,000000..100644
--- a/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@@ -1,239 -1,0 +1,392 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.topology.validation.TopologyValidator;
+import org.apache.knox.gateway.util.XmlUtils;
++import java.io.ByteArrayInputStream;
++import java.io.File;
++import java.io.FileOutputStream;
++import java.io.IOException;
++
++import java.util.ArrayList;
++import java.util.Collections;
++import java.util.HashMap;
++import java.util.List;
++import java.util.Map;
++import java.util.Properties;
++
++import javax.xml.xpath.XPath;
++import javax.xml.xpath.XPathConstants;
++import javax.xml.xpath.XPathFactory;
++
++import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.SAXException;
+
- import javax.xml.xpath.XPath;
- import javax.xml.xpath.XPathConstants;
- import javax.xml.xpath.XPathFactory;
- import java.io.*;
- import java.util.*;
-
- import static org.junit.Assert.*;
++import static org.junit.Assert.assertEquals;
++import static org.junit.Assert.assertFalse;
++import static org.junit.Assert.assertNotNull;
++import static org.junit.Assert.assertTrue;
++import static org.junit.Assert.fail;
+
+
+public class SimpleDescriptorHandlerTest {
+
+ private static final String TEST_PROVIDER_CONFIG =
+ " <gateway>\n" +
+ " <provider>\n" +
+ " <role>authentication</role>\n" +
+ " <name>ShiroProvider</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " <param>\n" +
+ " <!-- \n" +
+ " session timeout in minutes, this is really idle timeout,\n" +
+ " defaults to 30mins, if the property value is not defined,, \n" +
+ " current client authentication would expire if client idles contiuosly for more than this value\n" +
+ " -->\n" +
+ " <name>sessionTimeout</name>\n" +
+ " <value>30</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm</name>\n" +
+ " <value>org.apache.knox.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapContextFactory</name>\n" +
+ " <value>org.apache.knox.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory</name>\n" +
+ " <value>$ldapContextFactory</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.userDnTemplate</name>\n" +
+ " <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory.url</name>\n" +
+ " <value>ldap://localhost:33389</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+ " <value>simple</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>urls./**</name>\n" +
+ " <value>authcBasic</value>\n" +
+ " </param>\n" +
+ " </provider>\n" +
+ "\n" +
+ " <provider>\n" +
+ " <role>identity-assertion</role>\n" +
+ " <name>Default</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " </provider>\n" +
+ "\n" +
+ " <!--\n" +
+ " Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+ " For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+ " some AWS internal host name. If the client needs to make a subsequent request to the host identified\n" +
+ " in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+ "\n" +
+ " If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+ " enabled parameter as false.\n" +
+ "\n" +
+ " The name parameter specifies the external host names in a comma separated list.\n" +
+ " The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+ "\n" +
+ " Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+ " of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+ " Hadoop services using localhost. In real clusters, external host names would almost never be localhost.\n" +
+ " -->\n" +
+ " <provider>\n" +
+ " <role>hostmap</role>\n" +
+ " <name>static</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+ " </provider>\n" +
+ " </gateway>\n";
+
+
+ /**
+ * KNOX-1006
+ *
+ * N.B. This test depends on the DummyServiceDiscovery extension being configured:
+ * org.apache.knox.gateway.topology.discovery.test.extension.DummyServiceDiscovery
+ */
+ @Test
+ public void testSimpleDescriptorHandler() throws Exception {
+
+ final String type = "DUMMY";
+ final String address = "http://c6401.ambari.apache.org:8080";
+ final String clusterName = "dummy";
+ final Map<String, List<String>> serviceURLs = new HashMap<>();
+ serviceURLs.put("NAMENODE", null);
+ serviceURLs.put("JOBTRACKER", null);
+ serviceURLs.put("WEBHDFS", null);
+ serviceURLs.put("WEBHCAT", null);
+ serviceURLs.put("OOZIE", null);
+ serviceURLs.put("WEBHBASE", null);
+ serviceURLs.put("HIVE", null);
+ serviceURLs.put("RESOURCEMANAGER", null);
- serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
++ serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
+
+ // Write the externalized provider config to a temp file
+ File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+ File topologyFile = null;
+ try {
+ File destDir = (new File(".")).getCanonicalFile();
+
+ // Mock out the simple descriptor
+ SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+ EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+ EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+ EasyMock.expect(testDescriptor.getClusterName()).andReturn(clusterName).anyTimes();
+ List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+ for (String serviceName : serviceURLs.keySet()) {
+ SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+ EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+ EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+ EasyMock.replay(svc);
+ serviceMocks.add(svc);
+ }
+ EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+ EasyMock.replay(testDescriptor);
+
+ // Invoke the simple descriptor handler
+ Map<String, File> files =
+ SimpleDescriptorHandler.handle(testDescriptor,
+ providerConfig.getParentFile(), // simple desc co-located with provider config
+ destDir);
+ topologyFile = files.get("topology");
+
+ // Validate the resulting topology descriptor
+ assertTrue(topologyFile.exists());
+
+ // Validate the topology descriptor's correctness
+ TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+ if( !validator.validateTopology() ){
+ throw new SAXException( validator.getErrorString() );
+ }
+
+ XPathFactory xPathfactory = XPathFactory.newInstance();
+ XPath xpath = xPathfactory.newXPath();
+
+ // Parse the topology descriptor
+ Document topologyXml = XmlUtils.readXml(topologyFile);
+
+ // Validate the provider configuration
+ Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+ Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+ assertTrue("Resulting provider config should be identical to the referenced content.",
+ extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+ // Validate the service declarations
+ Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+ NodeList serviceNodes =
+ (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+ for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+ Node serviceNode = serviceNodes.item(serviceNodeIndex);
+ Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+ assertNotNull(roleNode);
+ String role = roleNode.getNodeValue();
+ NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+ for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+ Node urlNode = urlNodes.item(urlNodeIndex);
+ assertNotNull(urlNode);
+ String url = urlNode.getNodeValue();
+ assertNotNull("Every declared service should have a URL.", url);
+ if (!topologyServiceURLs.containsKey(role)) {
+ topologyServiceURLs.put(role, new ArrayList<String>());
+ }
+ topologyServiceURLs.get(role).add(url);
+ }
+ }
+ assertEquals("Unexpected number of service declarations.", serviceURLs.size(), topologyServiceURLs.size());
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ } finally {
+ providerConfig.delete();
+ if (topologyFile != null) {
+ topologyFile.delete();
+ }
+ }
+ }
+
+
- private File writeProviderConfig(String path, String content) throws IOException {
- File f = new File(path);
++ /**
++ * KNOX-1006
++ *
++ * Verify the behavior of the SimpleDescriptorHandler when service discovery fails to produce a valid URL for
++ * a service.
++ *
++ * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
++ * org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
++ */
++ @Test
++ public void testInvalidServiceURLFromDiscovery() throws Exception {
++ final String CLUSTER_NAME = "myproperties";
++
++ // Configure the PropertiesFile Service Discovery implementation for this test
++ final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
++ Properties serviceDiscoverySourceProps = new Properties();
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
++ DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
++ DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS", DEFAULT_VALID_SERVICE_URL);
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT", DEFAULT_VALID_SERVICE_URL);
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE", DEFAULT_VALID_SERVICE_URL);
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE", DEFAULT_VALID_SERVICE_URL);
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE", "{SCHEME}://localhost:10000/");
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
++ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI", DEFAULT_VALID_SERVICE_URL);
++ File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
++ serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
++ "Test Service Discovery Source");
++
++ // Prepare a mock SimpleDescriptor
++ final String type = "PROPERTIES_FILE";
++ final String address = serviceDiscoverySource.getAbsolutePath();
++ final Map<String, List<String>> serviceURLs = new HashMap<>();
++ serviceURLs.put("NAMENODE", null);
++ serviceURLs.put("JOBTRACKER", null);
++ serviceURLs.put("WEBHDFS", null);
++ serviceURLs.put("WEBHCAT", null);
++ serviceURLs.put("OOZIE", null);
++ serviceURLs.put("WEBHBASE", null);
++ serviceURLs.put("HIVE", null);
++ serviceURLs.put("RESOURCEMANAGER", null);
++ serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
+
- Writer fw = new FileWriter(f);
- fw.write(content);
- fw.flush();
- fw.close();
++ // Write the externalized provider config to a temp file
++ File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
++
++ File topologyFile = null;
++ try {
++ File destDir = (new File(".")).getCanonicalFile();
++
++ // Mock out the simple descriptor
++ SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
++ EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
++ EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
++ EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
++ EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
++ EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
++ EasyMock.expect(testDescriptor.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();
++ List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
++ for (String serviceName : serviceURLs.keySet()) {
++ SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
++ EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
++ EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
++ EasyMock.replay(svc);
++ serviceMocks.add(svc);
++ }
++ EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
++ EasyMock.replay(testDescriptor);
++
++ // Invoke the simple descriptor handler
++ Map<String, File> files =
++ SimpleDescriptorHandler.handle(testDescriptor,
++ providerConfig.getParentFile(), // simple desc co-located with provider config
++ destDir);
++
++ topologyFile = files.get("topology");
+
++ // Validate the resulting topology descriptor
++ assertTrue(topologyFile.exists());
++
++ // Validate the topology descriptor's correctness
++ TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
++ if( !validator.validateTopology() ){
++ throw new SAXException( validator.getErrorString() );
++ }
++
++ XPathFactory xPathfactory = XPathFactory.newInstance();
++ XPath xpath = xPathfactory.newXPath();
++
++ // Parse the topology descriptor
++ Document topologyXml = XmlUtils.readXml(topologyFile);
++
++ // Validate the provider configuration
++ Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
++ Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
++ assertTrue("Resulting provider config should be identical to the referenced content.",
++ extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
++
++ // Validate the service declarations
++ List<String> topologyServices = new ArrayList<>();
++ Map<String, List<String>> topologyServiceURLs = new HashMap<>();
++ NodeList serviceNodes =
++ (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
++ for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
++ Node serviceNode = serviceNodes.item(serviceNodeIndex);
++ Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
++ assertNotNull(roleNode);
++ String role = roleNode.getNodeValue();
++ topologyServices.add(role);
++ NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
++ for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
++ Node urlNode = urlNodes.item(urlNodeIndex);
++ assertNotNull(urlNode);
++ String url = urlNode.getNodeValue();
++ assertNotNull("Every declared service should have a URL.", url);
++ if (!topologyServiceURLs.containsKey(role)) {
++ topologyServiceURLs.put(role, new ArrayList<String>());
++ }
++ topologyServiceURLs.get(role).add(url);
++ }
++ }
++
++ // There should not be a service element for HIVE, since it had no valid URLs
++ assertEquals("Unexpected number of service declarations.", serviceURLs.size() - 1, topologyServices.size());
++ assertFalse("The HIVE service should have been omitted from the generated topology.", topologyServices.contains("HIVE"));
++
++ assertEquals("Unexpected number of service URLs.", serviceURLs.size() - 1, topologyServiceURLs.size());
++
++ } catch (Exception e) {
++ e.printStackTrace();
++ fail(e.getMessage());
++ } finally {
++ serviceDiscoverySource.delete();
++ providerConfig.delete();
++ if (topologyFile != null) {
++ topologyFile.delete();
++ }
++ }
++ }
++
++
++ private File writeProviderConfig(String path, String content) throws IOException {
++ File f = new File(path);
++ FileUtils.write(f, content);
+ return f;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-service-definitions/src/main/resources/services/ambariui/2.2.0/service.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
index 8a9d028,0000000..a97cee2
mode 100644,000000..100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/knox/gateway/service/knoxsso/WebSSOResource.java
@@@ -1,322 -1,0 +1,322 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.knoxsso;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import javax.annotation.PostConstruct;
+import javax.servlet.ServletContext;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.WebApplicationException;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.util.RegExUtils;
+import org.apache.knox.gateway.util.Urls;
+
+import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
+import static javax.ws.rs.core.MediaType.APPLICATION_XML;
+
+@Path( WebSSOResource.RESOURCE_PATH )
+public class WebSSOResource {
+ private static final String SSO_COOKIE_NAME = "knoxsso.cookie.name";
+ private static final String SSO_COOKIE_SECURE_ONLY_INIT_PARAM = "knoxsso.cookie.secure.only";
+ private static final String SSO_COOKIE_MAX_AGE_INIT_PARAM = "knoxsso.cookie.max.age";
+ private static final String SSO_COOKIE_DOMAIN_SUFFIX_PARAM = "knoxsso.cookie.domain.suffix";
+ private static final String SSO_COOKIE_TOKEN_TTL_PARAM = "knoxsso.token.ttl";
+ private static final String SSO_COOKIE_TOKEN_AUDIENCES_PARAM = "knoxsso.token.audiences";
+ private static final String SSO_COOKIE_TOKEN_WHITELIST_PARAM = "knoxsso.redirect.whitelist.regex";
+ private static final String SSO_ENABLE_SESSION_PARAM = "knoxsso.enable.session";
+ private static final String ORIGINAL_URL_REQUEST_PARAM = "originalUrl";
+ private static final String ORIGINAL_URL_COOKIE_NAME = "original-url";
+ private static final String DEFAULT_SSO_COOKIE_NAME = "hadoop-jwt";
+ // default for the whitelist - open up for development - relative paths and localhost only
+ private static final String DEFAULT_WHITELIST = "^/.*$;^https?://(localhost|127.0.0.1|0:0:0:0:0:0:0:1|::1):\\d{0,9}/.*$";
+ static final String RESOURCE_PATH = "/api/v1/websso";
+ private static KnoxSSOMessages log = MessagesFactory.get( KnoxSSOMessages.class );
+ private String cookieName = null;
+ private boolean secureOnly = true;
+ private int maxAge = -1;
+ private long tokenTTL = 30000l;
+ private String whitelist = null;
+ private String domainSuffix = null;
+ private List<String> targetAudiences = new ArrayList<>();
+ private boolean enableSession = false;
+
+ @Context
+ HttpServletRequest request;
+
+ @Context
+ HttpServletResponse response;
+
+ @Context
+ ServletContext context;
+
+ @PostConstruct
+ public void init() {
+
+ // configured cookieName
+ cookieName = context.getInitParameter(SSO_COOKIE_NAME);
+ if (cookieName == null) {
+ cookieName = DEFAULT_SSO_COOKIE_NAME;
+ }
+
+ String secure = context.getInitParameter(SSO_COOKIE_SECURE_ONLY_INIT_PARAM);
+ if (secure != null) {
+ secureOnly = ("false".equals(secure) ? false : true);
+ if (!secureOnly) {
+ log.cookieSecureOnly(secureOnly);
+ }
+ }
+
+ String age = context.getInitParameter(SSO_COOKIE_MAX_AGE_INIT_PARAM);
+ if (age != null) {
+ try {
+ log.setMaxAge(age);
+ maxAge = Integer.parseInt(age);
+ }
+ catch (NumberFormatException nfe) {
+ log.invalidMaxAgeEncountered(age);
+ }
+ }
+
+ domainSuffix = context.getInitParameter(SSO_COOKIE_DOMAIN_SUFFIX_PARAM);
+
+ whitelist = context.getInitParameter(SSO_COOKIE_TOKEN_WHITELIST_PARAM);
+ if (whitelist == null) {
+ // default to local/relative targets
+ whitelist = DEFAULT_WHITELIST;
+ }
+
+ String audiences = context.getInitParameter(SSO_COOKIE_TOKEN_AUDIENCES_PARAM);
+ if (audiences != null) {
+ String[] auds = audiences.split(",");
+ for (int i = 0; i < auds.length; i++) {
- targetAudiences.add(auds[i]);
++ targetAudiences.add(auds[i].trim());
+ }
+ }
+
+ String ttl = context.getInitParameter(SSO_COOKIE_TOKEN_TTL_PARAM);
+ if (ttl != null) {
+ try {
+ tokenTTL = Long.parseLong(ttl);
+ }
+ catch (NumberFormatException nfe) {
+ log.invalidTokenTTLEncountered(ttl);
+ }
+ }
+
+ String enableSession = context.getInitParameter(SSO_ENABLE_SESSION_PARAM);
+ this.enableSession = ("true".equals(enableSession));
+ }
+
+ @GET
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
+ public Response doGet() {
+ return getAuthenticationToken(HttpServletResponse.SC_TEMPORARY_REDIRECT);
+ }
+
+ @POST
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
+ public Response doPost() {
+ return getAuthenticationToken(HttpServletResponse.SC_SEE_OTHER);
+ }
+
+ private Response getAuthenticationToken(int statusCode) {
+ GatewayServices services = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+ boolean removeOriginalUrlCookie = true;
+ String original = getCookieValue((HttpServletRequest) request, ORIGINAL_URL_COOKIE_NAME);
+ if (original == null) {
+ // in the case where there are no SAML redirects done before here
+ // we need to get it from the request parameters
+ removeOriginalUrlCookie = false;
+ original = getOriginalUrlFromQueryParams();
+ if (original.isEmpty()) {
+ log.originalURLNotFound();
+ throw new WebApplicationException("Original URL not found in the request.", Response.Status.BAD_REQUEST);
+ }
+ boolean validRedirect = RegExUtils.checkWhitelist(whitelist, original);
+ if (!validRedirect) {
+ log.whiteListMatchFail(original, whitelist);
+ throw new WebApplicationException("Original URL not valid according to the configured whitelist.",
+ Response.Status.BAD_REQUEST);
+ }
+ }
+
+ JWTokenAuthority ts = services.getService(GatewayServices.TOKEN_SERVICE);
+ Principal p = ((HttpServletRequest)request).getUserPrincipal();
+
+ try {
+ JWT token = null;
+ if (targetAudiences.isEmpty()) {
+ token = ts.issueToken(p, "RS256", getExpiry());
+ } else {
+ token = ts.issueToken(p, targetAudiences, "RS256", getExpiry());
+ }
+
+ // Coverity CID 1327959
+ if( token != null ) {
+ addJWTHadoopCookie( original, token );
+ }
+
+ if (removeOriginalUrlCookie) {
+ removeOriginalUrlCookie(response);
+ }
+
+ log.aboutToRedirectToOriginal(original);
+ response.setStatus(statusCode);
+ response.setHeader("Location", original);
+ try {
+ response.getOutputStream().close();
+ } catch (IOException e) {
+ log.unableToCloseOutputStream(e.getMessage(), Arrays.toString(e.getStackTrace()));
+ }
+ }
+ catch (TokenServiceException e) {
+ log.unableToIssueToken(e);
+ }
+ URI location = null;
+ try {
+ location = new URI(original);
+ }
+ catch(URISyntaxException urise) {
+ // TODO: log the URISyntaxException and return an error response to the client
+ }
+
+ if (!enableSession) {
+ // invalidate the session to avoid autologin
+ // Coverity CID 1352857
+ HttpSession session = request.getSession(false);
+ if( session != null ) {
+ session.invalidate();
+ }
+ }
+
+ return Response.seeOther(location).entity("{ \"redirectTo\" : " + original + " }").build();
+ }
+
+ private String getOriginalUrlFromQueryParams() {
+ String original = request.getParameter(ORIGINAL_URL_REQUEST_PARAM);
+ StringBuffer buf = new StringBuffer(original);
+
+ // Add any other query params.
+ // Probably not ideal, but appending them unencoded avoids breaking
+ // existing integrations that would otherwise need to encode them.
+ Map<String, String[]> params = request.getParameterMap();
+ for (Entry<String, String[]> entry : params.entrySet()) {
+ if (!ORIGINAL_URL_REQUEST_PARAM.equals(entry.getKey())
+ && !original.contains(entry.getKey() + "=")) {
+ buf.append("&").append(entry.getKey());
+ String[] values = entry.getValue();
+ if (values.length > 0 && values[0] != null) {
+ buf.append("=");
+ }
+ for (int i = 0; i < values.length; i++) {
+ if (values[0] != null) {
+ buf.append(values[i]);
+ if (i < values.length-1) {
+ buf.append("&").append(entry.getKey()).append("=");
+ }
+ }
+ }
+ }
+ }
+
+ return buf.toString();
+ }
+
+ private long getExpiry() {
+ long expiry = 0l;
+ if (tokenTTL == -1) {
+ expiry = -1;
+ }
+ else {
+ expiry = System.currentTimeMillis() + tokenTTL;
+ }
+ return expiry;
+ }
+
+ private void addJWTHadoopCookie(String original, JWT token) {
+ log.addingJWTCookie(token.toString());
+ Cookie c = new Cookie(cookieName, token.toString());
+ c.setPath("/");
+ try {
+ String domain = Urls.getDomainName(original, domainSuffix);
+ if (domain != null) {
+ c.setDomain(domain);
+ }
+ c.setHttpOnly(true);
+ if (secureOnly) {
+ c.setSecure(true);
+ }
+ if (maxAge != -1) {
+ c.setMaxAge(maxAge);
+ }
+ response.addCookie(c);
+ log.addedJWTCookie();
+ }
+ catch(Exception e) {
+ log.unableAddCookieToResponse(e.getMessage(), Arrays.toString(e.getStackTrace()));
+ throw new WebApplicationException("Unable to add JWT cookie to response.");
+ }
+ }
+
+ private void removeOriginalUrlCookie(HttpServletResponse response) {
+ Cookie c = new Cookie(ORIGINAL_URL_COOKIE_NAME, null);
+ c.setMaxAge(0);
+ c.setPath(RESOURCE_PATH);
+ response.addCookie(c);
+ }
+
+ private String getCookieValue(HttpServletRequest request, String name) {
+ Cookie[] cookies = request.getCookies();
+ String value = null;
+ if (cookies != null) {
+ for(Cookie cookie : cookies){
+ if(name.equals(cookie.getName())){
+ value = cookie.getValue();
+ }
+ }
+ }
+ if (value == null) {
+ log.cookieNotFound(name);
+ }
+ return value;
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
index 6f0a805,0000000..6b8411e
mode 100644,000000..100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/knox/gateway/service/knoxsso/WebSSOResourceTest.java
@@@ -1,352 -1,0 +1,410 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.knoxsso;
+
+import org.apache.knox.gateway.util.RegExUtils;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.Principal;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletOutputStream;
+import javax.servlet.http.Cookie;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpServletResponseWrapper;
+
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.services.security.token.impl.JWTToken;
+import org.apache.knox.gateway.util.RegExUtils;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.JWSVerifier;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
+/**
+ * Some tests for the Knox SSO service.
+ */
+public class WebSSOResourceTest {
+
+ protected static RSAPublicKey publicKey;
+ protected static RSAPrivateKey privateKey;
+
+ @BeforeClass
+ public static void setup() throws Exception, NoSuchAlgorithmException {
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+ kpg.initialize(1024);
+ KeyPair KPair = kpg.generateKeyPair();
+
+ publicKey = (RSAPublicKey) KPair.getPublic();
+ privateKey = (RSAPrivateKey) KPair.getPrivate();
+ }
+
+ @Test
+ public void testWhitelistMatching() throws Exception {
+ String whitelist = "^https?://.*example.com:8080/.*$;" +
+ "^https?://.*example.com/.*$;" +
+ "^https?://.*example2.com:\\d{0,9}/.*$;" +
+ "^https://.*example3.com:\\d{0,9}/.*$;" +
+ "^https?://localhost:\\d{0,9}/.*$;^/.*$";
+
+ // match on explicit hostname/domain and port
+ Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example.com:8080/"));
+ // match on non-required port
+ Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example.com/"));
+ // match on required but any port
+ Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example2.com:1234/"));
+ // fail on missing port
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example2.com/"));
+ // fail on invalid port
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example.com:8081/"));
+ // fail on alphanumeric port
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example.com:A080/"));
+ // fail on invalid hostname/domain
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example.net:8080/"));
+ // fail on required port
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example2.com/"));
+ // fail on required https
+ Assert.assertFalse("Matched whitelist inappropriately", RegExUtils.checkWhitelist(whitelist,
+ "http://host.example3.com/"));
+ // match on localhost and port
+ Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
+ "http://localhost:8080/"));
+ // match on local/relative path
+ Assert.assertTrue("Failed to match whitelist", RegExUtils.checkWhitelist(whitelist,
+ "/local/resource/"));
+ }
+
+ @Test
+ public void testGetToken() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+ EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+ CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+ EasyMock.replay(principal, services, context, request);
+
+ WebSSOResource webSSOResponse = new WebSSOResource();
+ webSSOResponse.request = request;
+ webSSOResponse.response = responseWrapper;
+ webSSOResponse.context = context;
+ webSSOResponse.init();
+
+ // Issue a token
+ webSSOResponse.doGet();
+
+ // Check the cookie
+ Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+ assertNotNull(cookie);
+
+ JWTToken parsedToken = new JWTToken(cookie.getValue());
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+ }
+
+ @Test
+ public void testAudiences() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn("recipient1,recipient2");
+ EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+ EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
++
++ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
++ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
++
++ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
++ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
++
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
++ CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
++
++ EasyMock.replay(principal, services, context, request);
++
++ WebSSOResource webSSOResponse = new WebSSOResource();
++ webSSOResponse.request = request;
++ webSSOResponse.response = responseWrapper;
++ webSSOResponse.context = context;
++ webSSOResponse.init();
++
++ // Issue a token
++ webSSOResponse.doGet();
++
++ // Check the cookie
++ Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
++ assertNotNull(cookie);
++
++ JWTToken parsedToken = new JWTToken(cookie.getValue());
++ assertEquals("alice", parsedToken.getSubject());
++ assertTrue(authority.verifyToken(parsedToken));
++
++ // Verify the audiences
++ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
++ assertEquals(2, audiences.size());
++ assertTrue(audiences.contains("recipient1"));
++ assertTrue(audiences.contains("recipient2"));
++ }
++
++ @Test
++ public void testAudiencesWhitespace() throws Exception {
++
++ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
++ EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(" recipient1, recipient2 ");
++ EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
++ EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
++ EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
++ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
++
++ Principal principal = EasyMock.createNiceMock(Principal.class);
++ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
++ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+ CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+ EasyMock.replay(principal, services, context, request);
+
+ WebSSOResource webSSOResponse = new WebSSOResource();
+ webSSOResponse.request = request;
+ webSSOResponse.response = responseWrapper;
+ webSSOResponse.context = context;
+ webSSOResponse.init();
+
+ // Issue a token
+ webSSOResponse.doGet();
+
+ // Check the cookie
+ Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+ assertNotNull(cookie);
+
+ JWTToken parsedToken = new JWTToken(cookie.getValue());
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+
+ // Verify the audiences
+ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+ assertEquals(2, audiences.size());
+ assertTrue(audiences.contains("recipient1"));
+ assertTrue(audiences.contains("recipient2"));
+ }
+
+ /**
+ * An HttpServletResponseWrapper that records cookies added to the response so tests can inspect them
+ */
+ private static class CookieResponseWrapper extends HttpServletResponseWrapper {
+
+ private ServletOutputStream outputStream;
+ private Map<String, Cookie> cookies = new HashMap<>();
+
+ public CookieResponseWrapper(HttpServletResponse response) {
+ super(response);
+ }
+
+ public CookieResponseWrapper(HttpServletResponse response, ServletOutputStream outputStream) {
+ super(response);
+ this.outputStream = outputStream;
+ }
+
+ @Override
+ public ServletOutputStream getOutputStream() {
+ return outputStream;
+ }
+
+ @Override
+ public void addCookie(Cookie cookie) {
+ super.addCookie(cookie);
+ cookies.put(cookie.getName(), cookie);
+ }
+
+ public Cookie getCookie(String name) {
+ return cookies.get(name);
+ }
+
+ }
+
+ private static class TestJWTokenAuthority implements JWTokenAuthority {
+
+ private RSAPublicKey publicKey;
+ private RSAPrivateKey privateKey;
+
+ public TestJWTokenAuthority(RSAPublicKey publicKey, RSAPrivateKey privateKey) {
+ this.publicKey = publicKey;
+ this.privateKey = privateKey;
+ }
+
+ @Override
+ public JWT issueToken(Subject subject, String algorithm)
+ throws TokenServiceException {
+ Principal p = (Principal) subject.getPrincipals().toArray()[0];
+ return issueToken(p, algorithm);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String algorithm)
+ throws TokenServiceException {
+ return issueToken(p, null, algorithm);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm)
+ throws TokenServiceException {
+ return issueToken(p, audience, algorithm, -1);
+ }
+
+ @Override
+ public boolean verifyToken(JWT token) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier(publicKey);
+ return token.verify(verifier);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm,
+ long expires) throws TokenServiceException {
+ List<String> audiences = null;
+ if (audience != null) {
+ audiences = new ArrayList<String>();
+ audiences.add(audience);
+ }
+ return issueToken(p, audiences, algorithm, expires);
+ }
+
+ @Override
+ public JWT issueToken(Principal p, List<String> audiences, String algorithm,
+ long expires) throws TokenServiceException {
+ String[] claimArray = new String[4];
+ claimArray[0] = "KNOXSSO";
+ claimArray[1] = p.getName();
+ claimArray[2] = null;
+ if (expires == -1) {
+ claimArray[3] = null;
+ } else {
+ claimArray[3] = String.valueOf(expires);
+ }
+
+ JWTToken token = null;
+ if ("RS256".equals(algorithm)) {
+ token = new JWTToken("RS256", claimArray, audiences);
+ JWSSigner signer = new RSASSASigner(privateKey);
+ token.sign(signer);
+ } else {
+ throw new TokenServiceException("Cannot issue token - Unsupported algorithm");
+ }
+
+ return token;
+ }
+
+ @Override
+ public JWT issueToken(Principal p, String algorithm, long expiry)
+ throws TokenServiceException {
+ return issueToken(p, Collections.<String>emptyList(), algorithm, expiry);
+ }
+
+ @Override
+ public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier(publicKey);
+ return token.verify(verifier);
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-service-knoxtoken/src/main/java/org/apache/knox/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --cc gateway-service-knoxtoken/src/main/java/org/apache/knox/gateway/service/knoxtoken/TokenResource.java
index 2c77bdf,0000000..1c16ab3
mode 100644,000000..100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/knox/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/knox/gateway/service/knoxtoken/TokenResource.java
@@@ -1,183 -1,0 +1,218 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.service.knoxtoken;
+
+import java.io.IOException;
+import java.security.Principal;
++import java.security.cert.X509Certificate;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.annotation.PostConstruct;
+import javax.servlet.ServletContext;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.util.JsonUtils;
+
+import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
+import static javax.ws.rs.core.MediaType.APPLICATION_XML;
+
+@Path( TokenResource.RESOURCE_PATH )
+public class TokenResource {
+ private static final String EXPIRES_IN = "expires_in";
+ private static final String TOKEN_TYPE = "token_type";
+ private static final String ACCESS_TOKEN = "access_token";
+ private static final String TARGET_URL = "target_url";
+ private static final String BEARER = "Bearer ";
+ private static final String TOKEN_TTL_PARAM = "knox.token.ttl";
+ private static final String TOKEN_AUDIENCES_PARAM = "knox.token.audiences";
+ private static final String TOKEN_TARGET_URL = "knox.token.target.url";
+ private static final String TOKEN_CLIENT_DATA = "knox.token.client.data";
++ private static final String TOKEN_CLIENT_CERT_REQUIRED = "knox.token.client.cert.required";
++ private static final String TOKEN_ALLOWED_PRINCIPALS = "knox.token.allowed.principals";
+ static final String RESOURCE_PATH = "knoxtoken/api/v1/token";
+ private static TokenServiceMessages log = MessagesFactory.get( TokenServiceMessages.class );
+ private long tokenTTL = 30000l;
+ private List<String> targetAudiences = new ArrayList<>();
+ private String tokenTargetUrl = null;
+ private Map<String,Object> tokenClientDataMap = null;
++ private ArrayList<String> allowedDNs = new ArrayList<>();
++ private boolean clientCertRequired = false;
+
+ @Context
+ HttpServletRequest request;
+
+ @Context
+ HttpServletResponse response;
+
+ @Context
+ ServletContext context;
+
+ @PostConstruct
+ public void init() {
+
+ String audiences = context.getInitParameter(TOKEN_AUDIENCES_PARAM);
+ if (audiences != null) {
+ String[] auds = audiences.split(",");
+ for (int i = 0; i < auds.length; i++) {
- targetAudiences.add(auds[i]);
++ targetAudiences.add(auds[i].trim());
++ }
++ }
++
++ String clientCert = context.getInitParameter(TOKEN_CLIENT_CERT_REQUIRED);
++ clientCertRequired = "true".equals(clientCert);
++
++ String principals = context.getInitParameter(TOKEN_ALLOWED_PRINCIPALS);
++ if (principals != null) {
++ String[] dns = principals.split(";");
++ for (int i = 0; i < dns.length; i++) {
++ allowedDNs.add(dns[i]);
+ }
+ }
+
+ String ttl = context.getInitParameter(TOKEN_TTL_PARAM);
+ if (ttl != null) {
+ try {
+ tokenTTL = Long.parseLong(ttl);
+ }
+ catch (NumberFormatException nfe) {
+ log.invalidTokenTTLEncountered(ttl);
+ }
+ }
+
+ tokenTargetUrl = context.getInitParameter(TOKEN_TARGET_URL);
+
+ String clientData = context.getInitParameter(TOKEN_CLIENT_DATA);
+ if (clientData != null) {
+ tokenClientDataMap = new HashMap<>();
+ String[] tokenClientData = clientData.split(",");
+ addClientDataToMap(tokenClientData, tokenClientDataMap);
+ }
+ }
+
+ @GET
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
+ public Response doGet() {
+ return getAuthenticationToken();
+ }
+
+ @POST
+ @Produces({APPLICATION_JSON, APPLICATION_XML})
+ public Response doPost() {
+ return getAuthenticationToken();
+ }
+
++ private X509Certificate extractCertificate(HttpServletRequest req) {
++ X509Certificate[] certs = (X509Certificate[]) req.getAttribute("javax.servlet.request.X509Certificate");
++ if (null != certs && certs.length > 0) {
++ return certs[0];
++ }
++ return null;
++ }
++
+ private Response getAuthenticationToken() {
++ if (clientCertRequired) {
++ X509Certificate cert = extractCertificate(request);
++ if (cert != null) {
++ if (!allowedDNs.contains(cert.getSubjectDN().getName())) {
++ return Response.status(403).entity("{ \"Unable to get token - untrusted client cert.\" }").build();
++ }
++ }
++ else {
++ return Response.status(403).entity("{ \"Unable to get token - client cert required.\" }").build();
++ }
++ }
+ GatewayServices services = (GatewayServices) request.getServletContext()
+ .getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+
+ JWTokenAuthority ts = services.getService(GatewayServices.TOKEN_SERVICE);
+ Principal p = ((HttpServletRequest)request).getUserPrincipal();
+ long expires = getExpiry();
+
+ try {
+ JWT token = null;
+ if (targetAudiences.isEmpty()) {
+ token = ts.issueToken(p, "RS256", expires);
+ } else {
+ token = ts.issueToken(p, targetAudiences, "RS256", expires);
+ }
+
+ if (token != null) {
+ String accessToken = token.toString();
+
+ HashMap<String, Object> map = new HashMap<>();
+ map.put(ACCESS_TOKEN, accessToken);
+ map.put(TOKEN_TYPE, BEARER);
+ map.put(EXPIRES_IN, expires);
+ if (tokenTargetUrl != null) {
+ map.put(TARGET_URL, tokenTargetUrl);
+ }
+ if (tokenClientDataMap != null) {
+ map.putAll(tokenClientDataMap);
+ }
+
+ String jsonResponse = JsonUtils.renderAsJsonString(map);
+
+ response.getWriter().write(jsonResponse);
+ return Response.ok().build();
+ }
+ else {
+ return Response.serverError().build();
+ }
+ }
+ catch (TokenServiceException | IOException e) {
+ log.unableToIssueToken(e);
+ }
+ return Response.ok().entity("{ \"Unable to acquire token.\" }").build();
+ }
+
+ void addClientDataToMap(String[] tokenClientData,
+ Map<String,Object> map) {
+ String[] kv = null;
+ for (int i = 0; i < tokenClientData.length; i++) {
+ kv = tokenClientData[i].split("=");
+ if (kv.length == 2) {
+ map.put(kv[0], kv[1]);
+ }
+ }
+ }
+
+ private long getExpiry() {
+ long expiry = 0l;
+ if (tokenTTL == -1) {
+ expiry = -1;
+ }
+ else {
+ expiry = System.currentTimeMillis() + tokenTTL;
+ }
+ return expiry;
+ }
+}
[06/23] knox git commit: KNOX-1069 - KnoxSSO token audience config
should trim values
Posted by mo...@apache.org.
KNOX-1069 - KnoxSSO token audience config should trim values
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5de920bd
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5de920bd
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5de920bd
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 5de920bd092d2822a32aa546d01bb8e64de3a5a9
Parents: 90f1df7
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Oct 4 11:00:40 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Oct 4 11:00:40 2017 +0100
----------------------------------------------------------------------
.../jwt/filter/AbstractJWTFilter.java | 2 +-
.../federation/AbstractJWTFilterTest.java | 31 +++++++++++
.../gateway/service/knoxsso/WebSSOResource.java | 2 +-
.../service/knoxsso/WebSSOResourceTest.java | 58 ++++++++++++++++++++
.../service/knoxtoken/TokenResource.java | 2 +-
.../knoxtoken/TokenServiceResourceTest.java | 58 ++++++++++++++++++++
6 files changed, 150 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index d4c6717..7f8e733 100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/hadoop/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@ -118,7 +118,7 @@ public abstract class AbstractJWTFilter implements Filter {
String[] audArray = expectedAudiences.split(",");
audList = new ArrayList<String>();
for (String a : audArray) {
- audList.add(a);
+ audList.add(a.trim());
}
}
return audList;
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
index bdde3e6..bd34c04 100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/hadoop/gateway/provider/federation/AbstractJWTFilterTest.java
@@ -203,6 +203,37 @@ public abstract class AbstractJWTFilterTest {
}
@Test
+ public void testValidAudienceJWTWhitespace() throws Exception {
+ try {
+ Properties props = getProperties();
+ props.put(getAudienceProperty(), " foo, bar ");
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
public void testValidVerificationPEM() throws Exception {
try {
Properties props = getProperties();
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
index 0d9e6dd..70228d3 100644
--- a/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
+++ b/gateway-service-knoxsso/src/main/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResource.java
@@ -127,7 +127,7 @@ public class WebSSOResource {
if (audiences != null) {
String[] auds = audiences.split(",");
for (int i = 0; i < auds.length; i++) {
- targetAudiences.add(auds[i]);
+ targetAudiences.add(auds[i].trim());
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
index 4e9e76b..568f0fe 100644
--- a/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
+++ b/gateway-service-knoxsso/src/test/java/org/apache/hadoop/gateway/service/knoxsso/WebSSOResourceTest.java
@@ -229,6 +229,64 @@ public class WebSSOResourceTest {
assertTrue(audiences.contains("recipient2"));
}
+ @Test
+ public void testAudiencesWhitespace() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.name")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.secure.only")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.max.age")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.cookie.domain.suffix")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.redirect.whitelist.regex")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.token.audiences")).andReturn(" recipient1, recipient2 ");
+ EasyMock.expect(context.getInitParameter("knoxsso.token.ttl")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knoxsso.enable.session")).andReturn(null);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getParameter("originalUrl")).andReturn("http://localhost:9080/service");
+ EasyMock.expect(request.getParameterMap()).andReturn(Collections.<String,String[]>emptyMap());
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ ServletOutputStream outputStream = EasyMock.createNiceMock(ServletOutputStream.class);
+ CookieResponseWrapper responseWrapper = new CookieResponseWrapper(response, outputStream);
+
+ EasyMock.replay(principal, services, context, request);
+
+ WebSSOResource webSSOResponse = new WebSSOResource();
+ webSSOResponse.request = request;
+ webSSOResponse.response = responseWrapper;
+ webSSOResponse.context = context;
+ webSSOResponse.init();
+
+ // Issue a token
+ webSSOResponse.doGet();
+
+ // Check the cookie
+ Cookie cookie = responseWrapper.getCookie("hadoop-jwt");
+ assertNotNull(cookie);
+
+ JWTToken parsedToken = new JWTToken(cookie.getValue());
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+
+ // Verify the audiences
+ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+ assertEquals(2, audiences.size());
+ assertTrue(audiences.contains("recipient1"));
+ assertTrue(audiences.contains("recipient2"));
+ }
+
/**
* A wrapper for HttpServletResponseWrapper to store the cookies
*/
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
index 9d8bae3..8dddf02 100644
--- a/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
+++ b/gateway-service-knoxtoken/src/main/java/org/apache/hadoop/gateway/service/knoxtoken/TokenResource.java
@@ -82,7 +82,7 @@ public class TokenResource {
if (audiences != null) {
String[] auds = audiences.split(",");
for (int i = 0; i < auds.length; i++) {
- targetAudiences.add(auds[i]);
+ targetAudiences.add(auds[i].trim());
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/5de920bd/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
----------------------------------------------------------------------
diff --git a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
index b4e51e6..0046bd9 100644
--- a/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
+++ b/gateway-service-knoxtoken/src/test/java/org/apache/hadoop/gateway/service/knoxtoken/TokenServiceResourceTest.java
@@ -206,6 +206,64 @@ public class TokenServiceResourceTest {
}
@Test
+ public void testAudiencesWhitespace() throws Exception {
+
+ ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+ EasyMock.expect(context.getInitParameter("knox.token.audiences")).andReturn(" recipient1, recipient2 ");
+ EasyMock.expect(context.getInitParameter("knox.token.ttl")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knox.token.target.url")).andReturn(null);
+ EasyMock.expect(context.getInitParameter("knox.token.client.data")).andReturn(null);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ EasyMock.expect(request.getServletContext()).andReturn(context).anyTimes();
+ Principal principal = EasyMock.createNiceMock(Principal.class);
+ EasyMock.expect(principal.getName()).andReturn("alice").anyTimes();
+ EasyMock.expect(request.getUserPrincipal()).andReturn(principal).anyTimes();
+
+ GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+ EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE)).andReturn(services);
+
+ JWTokenAuthority authority = new TestJWTokenAuthority(publicKey, privateKey);
+ EasyMock.expect(services.getService(GatewayServices.TOKEN_SERVICE)).andReturn(authority);
+
+ StringWriter writer = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(writer);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.getWriter()).andReturn(printWriter);
+
+ EasyMock.replay(principal, services, context, request, response);
+
+ TokenResource tr = new TokenResource();
+ tr.request = request;
+ tr.response = response;
+ tr.context = context;
+ tr.init();
+
+ // Issue a token
+ Response retResponse = tr.doGet();
+
+ assertEquals(200, retResponse.getStatus());
+
+ // Parse the response
+ String retString = writer.toString();
+ String accessToken = getTagValue(retString, "access_token");
+ assertNotNull(accessToken);
+ String expiry = getTagValue(retString, "expires_in");
+ assertNotNull(expiry);
+
+ // Verify the token
+ JWTToken parsedToken = new JWTToken(accessToken);
+ assertEquals("alice", parsedToken.getSubject());
+ assertTrue(authority.verifyToken(parsedToken));
+
+ // Verify the audiences
+ List<String> audiences = Arrays.asList(parsedToken.getAudienceClaims());
+ assertEquals(2, audiences.size());
+ assertTrue(audiences.contains("recipient1"));
+ assertTrue(audiences.contains("recipient2"));
+ }
+
+ @Test
public void testValidClientCert() throws Exception {
ServletContext context = EasyMock.createNiceMock(ServletContext.class);
[07/23] knox git commit: KNOX-1048 - Knoxshell samples missing
hadoop-examples.jar
Posted by mo...@apache.org.
KNOX-1048 - Knoxshell samples missing hadoop-examples.jar
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/ff3af36d
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/ff3af36d
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/ff3af36d
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: ff3af36d63609cd0b60400071e9df26ec41e96d3
Parents: 5de920b
Author: Colm O hEigeartaigh <co...@apache.org>
Authored: Wed Oct 4 15:47:01 2017 +0100
Committer: Colm O hEigeartaigh <co...@apache.org>
Committed: Wed Oct 4 15:47:01 2017 +0100
----------------------------------------------------------------------
gateway-shell-release/pom.xml | 4 ++++
1 file changed, 4 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/ff3af36d/gateway-shell-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-shell-release/pom.xml b/gateway-shell-release/pom.xml
index 3365641..1e6c5f8 100644
--- a/gateway-shell-release/pom.xml
+++ b/gateway-shell-release/pom.xml
@@ -170,6 +170,10 @@
<artifactId>gateway-shell-samples</artifactId>
</dependency>
<dependency>
+ <groupId>${gateway-group}</groupId>
+ <artifactId>hadoop-examples</artifactId>
+ </dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
[23/23] knox git commit: KNOX-998 - package name refactoring
Posted by mo...@apache.org.
KNOX-998 - package name refactoring
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/7d0bff16
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/7d0bff16
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/7d0bff16
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 7d0bff16e7128e5f2e10b54237cbc93f45932ffc
Parents: 78d35f1
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 16 13:05:28 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 16 13:05:28 2017 -0400
----------------------------------------------------------------------
.../ambari/AmbariDynamicServiceURLCreator.java | 151 ----
.../ambari/ConditionalValueHandler.java | 24 -
.../discovery/ambari/PropertyEqualsHandler.java | 76 --
.../ambari/ServiceURLPropertyConfig.java | 324 -------
.../discovery/ambari/SimpleValueHandler.java | 32 -
.../ambari/AmbariDynamicServiceURLCreator.java | 151 ++++
.../ambari/AmbariServiceURLCreator.java | 0
.../ambari/ConditionalValueHandler.java | 24 +
.../discovery/ambari/PropertyEqualsHandler.java | 76 ++
.../ambari/ServiceURLPropertyConfig.java | 324 +++++++
.../discovery/ambari/SimpleValueHandler.java | 32 +
.../AmbariDynamicServiceURLCreatorTest.java | 876 -------------------
.../AmbariDynamicServiceURLCreatorTest.java | 876 +++++++++++++++++++
.../gateway/websockets/ProxyInboundClient.java | 107 ---
.../gateway/websockets/ProxyInboundClient.java | 107 +++
.../websockets/ProxyInboundClientTest.java | 374 --------
.../websockets/ProxyInboundClientTest.java | 374 ++++++++
17 files changed, 1964 insertions(+), 1964 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
deleted file mode 100644
index ed5d3e7..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-class AmbariDynamicServiceURLCreator {
-
- static final String MAPPING_CONFIG_OVERRIDE_PROPERTY = "org.apache.gateway.topology.discovery.ambari.config";
-
- private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
- private AmbariCluster cluster = null;
- private ServiceURLPropertyConfig config;
-
- AmbariDynamicServiceURLCreator(AmbariCluster cluster) {
- this.cluster = cluster;
-
- String mappingConfiguration = System.getProperty(MAPPING_CONFIG_OVERRIDE_PROPERTY);
- if (mappingConfiguration != null) {
- File mappingConfigFile = new File(mappingConfiguration);
- if (mappingConfigFile.exists()) {
- try {
- config = new ServiceURLPropertyConfig(mappingConfigFile);
- log.loadedComponentConfigMappings(mappingConfigFile.getAbsolutePath());
- } catch (Exception e) {
- log.failedToLoadComponentConfigMappings(mappingConfigFile.getAbsolutePath(), e);
- }
- }
- }
-
- // If there is no valid override configured, fall-back to the internal mapping configuration
- if (config == null) {
- config = new ServiceURLPropertyConfig();
- }
- }
-
- AmbariDynamicServiceURLCreator(AmbariCluster cluster, File mappingConfiguration) throws IOException {
- this.cluster = cluster;
- config = new ServiceURLPropertyConfig(new FileInputStream(mappingConfiguration));
- }
-
- AmbariDynamicServiceURLCreator(AmbariCluster cluster, String mappings) {
- this.cluster = cluster;
- config = new ServiceURLPropertyConfig(new ByteArrayInputStream(mappings.getBytes()));
- }
-
- List<String> create(String serviceName) {
- List<String> urls = new ArrayList<>();
-
- Map<String, String> placeholderValues = new HashMap<>();
- List<String> componentHostnames = new ArrayList<>();
- String hostNamePlaceholder = null;
-
- ServiceURLPropertyConfig.URLPattern pattern = config.getURLPattern(serviceName);
- if (pattern != null) {
- for (String propertyName : pattern.getPlaceholders()) {
- ServiceURLPropertyConfig.Property configProperty = config.getConfigProperty(serviceName, propertyName);
-
- String propertyValue = null;
- String propertyType = configProperty.getType();
- if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
- log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
- AmbariCluster.ServiceConfiguration svcConfig =
- cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
- if (svcConfig != null) {
- propertyValue = svcConfig.getProperties().get(configProperty.getValue());
- }
- } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
- String compName = configProperty.getComponent();
- if (compName != null) {
- AmbariComponent component = cluster.getComponent(compName);
- if (component != null) {
- if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
- log.lookingUpComponentHosts(compName);
- componentHostnames.addAll(component.getHostNames());
- hostNamePlaceholder = propertyName; // Remember the host name placeholder
- } else {
- log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
- propertyValue = component.getConfigProperty(configProperty.getValue());
- }
- }
- }
- } else { // Derived property
- log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
- ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
- propertyValue = p.getValue();
- if (propertyValue == null) {
- if (p.getConditionHandler() != null) {
- propertyValue = p.getConditionHandler().evaluate(config, cluster);
- }
- }
- }
-
- log.determinedPropertyValue(configProperty.getName(), propertyValue);
- placeholderValues.put(configProperty.getName(), propertyValue);
- }
-
- // For patterns with a placeholder value for the hostname (e.g., multiple URL scenarios)
- if (!componentHostnames.isEmpty()) {
- for (String componentHostname : componentHostnames) {
- String url = pattern.get().replace("{" + hostNamePlaceholder + "}", componentHostname);
- urls.add(createURL(url, placeholderValues));
- }
- } else { // Single URL result case
- urls.add(createURL(pattern.get(), placeholderValues));
- }
- }
-
- return urls;
- }
-
- private String createURL(String pattern, Map<String, String> placeholderValues) {
- String url = null;
- if (pattern != null) {
- url = pattern;
- for (String placeHolder : placeholderValues.keySet()) {
- String value = placeholderValues.get(placeHolder);
- if (value != null) {
- url = url.replace("{" + placeHolder + "}", value);
- }
- }
- }
- return url;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
deleted file mode 100644
index d76a161..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ConditionalValueHandler.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-interface ConditionalValueHandler {
-
- String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster);
-
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
deleted file mode 100644
index 642a676..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-class PropertyEqualsHandler implements ConditionalValueHandler {
-
- private String serviceName = null;
- private String propertyName = null;
- private String propertyValue = null;
- private ConditionalValueHandler affirmativeResult = null;
- private ConditionalValueHandler negativeResult = null;
-
- PropertyEqualsHandler(String serviceName,
- String propertyName,
- String propertyValue,
- ConditionalValueHandler affirmativeResult,
- ConditionalValueHandler negativeResult) {
- this.serviceName = serviceName;
- this.propertyName = propertyName;
- this.propertyValue = propertyValue;
- this.affirmativeResult = affirmativeResult;
- this.negativeResult = negativeResult;
- }
-
- @Override
- public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
- String result = null;
-
- ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, propertyName);
- if (p != null) {
- String value = getActualPropertyValue(cluster, p);
- if (propertyValue.equals(value)) {
- result = affirmativeResult.evaluate(config, cluster);
- } else if (negativeResult != null) {
- result = negativeResult.evaluate(config, cluster);
- }
-
- // Check if the result is a reference to a local derived property
- ServiceURLPropertyConfig.Property derived = config.getConfigProperty(serviceName, result);
- if (derived != null) {
- result = getActualPropertyValue(cluster, derived);
- }
- }
-
- return result;
- }
-
- private String getActualPropertyValue(AmbariCluster cluster, ServiceURLPropertyConfig.Property property) {
- String value = null;
- String propertyType = property.getType();
- if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
- AmbariComponent component = cluster.getComponent(property.getComponent());
- if (component != null) {
- value = component.getConfigProperty(property.getValue());
- }
- } else if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
- value = cluster.getServiceConfiguration(property.getService(), property.getServiceConfig()).getProperties().get(property.getValue());
- }
- return value;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
deleted file mode 100644
index 3330cc3..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
+++ /dev/null
@@ -1,324 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
-import org.apache.hadoop.gateway.util.XmlUtils;
-import org.w3c.dom.Document;
-import org.w3c.dom.NamedNodeMap;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpression;
-import javax.xml.xpath.XPathExpressionException;
-import javax.xml.xpath.XPathFactory;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-/**
- * Service URL pattern mapping configuration model.
- */
-class ServiceURLPropertyConfig {
-
- private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
- private static final String ATTR_NAME = "name";
-
- private static XPathExpression SERVICE_URL_PATTERN_MAPPINGS;
- private static XPathExpression URL_PATTERN;
- private static XPathExpression PROPERTIES;
- static {
- XPath xpath = XPathFactory.newInstance().newXPath();
- try {
- SERVICE_URL_PATTERN_MAPPINGS = xpath.compile("/service-discovery-url-mappings/service");
- URL_PATTERN = xpath.compile("url-pattern/text()");
- PROPERTIES = xpath.compile("properties/property");
- } catch (XPathExpressionException e) {
- e.printStackTrace();
- }
- }
-
- private static final String DEFAULT_SERVICE_URL_MAPPINGS = "ambari-service-discovery-url-mappings.xml";
-
- private Map<String, URLPattern> urlPatterns = new HashMap<>();
-
- private Map<String, Map<String, Property>> properties = new HashMap<>();
-
-
- /**
- * The default service URL pattern to property mapping configuration will be used.
- */
- ServiceURLPropertyConfig() {
- this(ServiceURLPropertyConfig.class.getClassLoader().getResourceAsStream(DEFAULT_SERVICE_URL_MAPPINGS));
- }
-
- /**
- * The default service URL pattern to property mapping configuration will be used.
- */
- ServiceURLPropertyConfig(File mappingConfigurationFile) throws Exception {
- this(new FileInputStream(mappingConfigurationFile));
- }
-
- /**
- *
- * @param source An InputStream for the XML content
- */
- ServiceURLPropertyConfig(InputStream source) {
- // Parse the XML, and build the model
- try {
- Document doc = XmlUtils.readXml(source);
-
- NodeList serviceNodes =
- (NodeList) SERVICE_URL_PATTERN_MAPPINGS.evaluate(doc, XPathConstants.NODESET);
- for (int i=0; i < serviceNodes.getLength(); i++) {
- Node serviceNode = serviceNodes.item(i);
- String serviceName = serviceNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
- properties.put(serviceName, new HashMap<String, Property>());
-
- Node urlPatternNode = (Node) URL_PATTERN.evaluate(serviceNode, XPathConstants.NODE);
- if (urlPatternNode != null) {
- urlPatterns.put(serviceName, new URLPattern(urlPatternNode.getNodeValue()));
- }
-
- NodeList propertiesNode = (NodeList) PROPERTIES.evaluate(serviceNode, XPathConstants.NODESET);
- if (propertiesNode != null) {
- processProperties(serviceName, propertiesNode);
- }
- }
- } catch (Exception e) {
- log.failedToLoadServiceDiscoveryConfiguration(e);
- } finally {
- try {
- source.close();
- } catch (IOException e) {
- // Ignore
- }
- }
- }
-
- private void processProperties(String serviceName, NodeList propertyNodes) {
- for (int i = 0; i < propertyNodes.getLength(); i++) {
- Property p = Property.createProperty(serviceName, propertyNodes.item(i));
- properties.get(serviceName).put(p.getName(), p);
- }
- }
-
- URLPattern getURLPattern(String service) {
- return urlPatterns.get(service);
- }
-
- Property getConfigProperty(String service, String property) {
- return properties.get(service).get(property);
- }
-
- static class URLPattern {
- String pattern;
- List<String> placeholders = new ArrayList<>();
-
- URLPattern(String pattern) {
- this.pattern = pattern;
-
- final Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
- final Matcher matcher = regex.matcher(pattern);
- while( matcher.find() ){
- placeholders.add(matcher.group(1));
- }
- }
-
- String get() {return pattern; }
- List<String> getPlaceholders() {
- return placeholders;
- }
- }
-
- static class Property {
- static final String TYPE_SERVICE = "SERVICE";
- static final String TYPE_COMPONENT = "COMPONENT";
- static final String TYPE_DERIVED = "DERIVED";
-
- static final String PROP_COMP_HOSTNAME = "component.host.name";
-
- static final String ATTR_NAME = "name";
- static final String ATTR_PROPERTY = "property";
- static final String ATTR_VALUE = "value";
-
- static XPathExpression HOSTNAME;
- static XPathExpression SERVICE_CONFIG;
- static XPathExpression COMPONENT;
- static XPathExpression CONFIG_PROPERTY;
- static XPathExpression IF;
- static XPathExpression THEN;
- static XPathExpression ELSE;
- static XPathExpression TEXT;
- static {
- XPath xpath = XPathFactory.newInstance().newXPath();
- try {
- HOSTNAME = xpath.compile("hostname");
- SERVICE_CONFIG = xpath.compile("service-config");
- COMPONENT = xpath.compile("component");
- CONFIG_PROPERTY = xpath.compile("config-property");
- IF = xpath.compile("if");
- THEN = xpath.compile("then");
- ELSE = xpath.compile("else");
- TEXT = xpath.compile("text()");
- } catch (XPathExpressionException e) {
- e.printStackTrace();
- }
- }
-
-
- String type;
- String name;
- String component;
- String service;
- String serviceConfig;
- String value;
- ConditionalValueHandler conditionHandler = null;
-
- private Property(String type,
- String propertyName,
- String component,
- String service,
- String configType,
- String value,
- ConditionalValueHandler pch) {
- this.type = type;
- this.name = propertyName;
- this.service = service;
- this.component = component;
- this.serviceConfig = configType;
- this.value = value;
- conditionHandler = pch;
- }
-
- static Property createProperty(String serviceName, Node propertyNode) {
- String propertyName = propertyNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
- String propertyType = null;
- String serviceType = null;
- String configType = null;
- String componentType = null;
- String value = null;
- ConditionalValueHandler pch = null;
-
- try {
- Node hostNameNode = (Node) HOSTNAME.evaluate(propertyNode, XPathConstants.NODE);
- if (hostNameNode != null) {
- value = PROP_COMP_HOSTNAME;
- }
-
- // Check for a service-config node
- Node scNode = (Node) SERVICE_CONFIG.evaluate(propertyNode, XPathConstants.NODE);
- if (scNode != null) {
- // Service config property
- propertyType = Property.TYPE_SERVICE;
- serviceType = scNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
- Node scTextNode = (Node) TEXT.evaluate(scNode, XPathConstants.NODE);
- configType = scTextNode.getNodeValue();
- } else { // If not service-config node, check for a component config node
- Node cNode = (Node) COMPONENT.evaluate(propertyNode, XPathConstants.NODE);
- if (cNode != null) {
- // Component config property
- propertyType = Property.TYPE_COMPONENT;
- componentType = cNode.getFirstChild().getNodeValue();
- Node cTextNode = (Node) TEXT.evaluate(cNode, XPathConstants.NODE);
- configType = cTextNode.getNodeValue();
- componentType = cTextNode.getNodeValue();
- }
- }
-
- // Check for a config property node
- Node cpNode = (Node) CONFIG_PROPERTY.evaluate(propertyNode, XPathConstants.NODE);
- if (cpNode != null) {
- // Check for a condition element
- Node ifNode = (Node) IF.evaluate(cpNode, XPathConstants.NODE);
- if (ifNode != null) {
- propertyType = TYPE_DERIVED;
- pch = getConditionHandler(serviceName, ifNode);
- } else {
- Node cpTextNode = (Node) TEXT.evaluate(cpNode, XPathConstants.NODE);
- value = cpTextNode.getNodeValue();
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- // Create and return the property representation
- return new Property(propertyType, propertyName, componentType, serviceType, configType, value, pch);
- }
-
- private static ConditionalValueHandler getConditionHandler(String serviceName, Node ifNode) throws Exception {
- ConditionalValueHandler result = null;
-
- if (ifNode != null) {
- NamedNodeMap attrs = ifNode.getAttributes();
- String comparisonPropName = attrs.getNamedItem(ATTR_PROPERTY).getNodeValue();
- String comparisonValue = attrs.getNamedItem(ATTR_VALUE).getNodeValue();
-
- ConditionalValueHandler affirmativeResult = null;
- Node thenNode = (Node) THEN.evaluate(ifNode, XPathConstants.NODE);
- if (thenNode != null) {
- Node subIfNode = (Node) IF.evaluate(thenNode, XPathConstants.NODE);
- if (subIfNode != null) {
- affirmativeResult = getConditionHandler(serviceName, subIfNode);
- } else {
- affirmativeResult = new SimpleValueHandler(thenNode.getFirstChild().getNodeValue());
- }
- }
-
- ConditionalValueHandler negativeResult = null;
- Node elseNode = (Node) ELSE.evaluate(ifNode, XPathConstants.NODE);
- if (elseNode != null) {
- Node subIfNode = (Node) IF.evaluate(elseNode, XPathConstants.NODE);
- if (subIfNode != null) {
- negativeResult = getConditionHandler(serviceName, subIfNode);
- } else {
- negativeResult = new SimpleValueHandler(elseNode.getFirstChild().getNodeValue());
- }
- }
-
- result = new PropertyEqualsHandler(serviceName,
- comparisonPropName,
- comparisonValue,
- affirmativeResult,
- negativeResult);
- }
-
- return result;
- }
-
- String getType() { return type; }
- String getName() { return name; }
- String getComponent() { return component; }
- String getService() { return service; }
- String getServiceConfig() { return serviceConfig; }
- String getValue() {
- return value;
- }
- ConditionalValueHandler getConditionHandler() { return conditionHandler; }
- }
-}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
deleted file mode 100644
index 8e0cd75..0000000
--- a/gateway-discovery-ambari/src/main/java/org/apache/hadoop/gateway/topology/discovery/ambari/SimpleValueHandler.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.hadoop.gateway.topology.discovery.ambari;
-
-
-class SimpleValueHandler implements ConditionalValueHandler {
- private String value;
-
- SimpleValueHandler(String value) {
- this.value = value;
- }
-
- @Override
- public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
- return value;
- }
-}
-
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
new file mode 100644
index 0000000..3c2269d
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreator.java
@@ -0,0 +1,151 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+class AmbariDynamicServiceURLCreator {
+
+ static final String MAPPING_CONFIG_OVERRIDE_PROPERTY = "org.apache.gateway.topology.discovery.ambari.config";
+
+ private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+ private AmbariCluster cluster = null;
+ private ServiceURLPropertyConfig config;
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster) {
+ this.cluster = cluster;
+
+ String mappingConfiguration = System.getProperty(MAPPING_CONFIG_OVERRIDE_PROPERTY);
+ if (mappingConfiguration != null) {
+ File mappingConfigFile = new File(mappingConfiguration);
+ if (mappingConfigFile.exists()) {
+ try {
+ config = new ServiceURLPropertyConfig(mappingConfigFile);
+ log.loadedComponentConfigMappings(mappingConfigFile.getAbsolutePath());
+ } catch (Exception e) {
+ log.failedToLoadComponentConfigMappings(mappingConfigFile.getAbsolutePath(), e);
+ }
+ }
+ }
+
+ // If there is no valid override configured, fall-back to the internal mapping configuration
+ if (config == null) {
+ config = new ServiceURLPropertyConfig();
+ }
+ }
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster, File mappingConfiguration) throws IOException {
+ this.cluster = cluster;
+ config = new ServiceURLPropertyConfig(new FileInputStream(mappingConfiguration));
+ }
+
+ AmbariDynamicServiceURLCreator(AmbariCluster cluster, String mappings) {
+ this.cluster = cluster;
+ config = new ServiceURLPropertyConfig(new ByteArrayInputStream(mappings.getBytes()));
+ }
+
+ List<String> create(String serviceName) {
+ List<String> urls = new ArrayList<>();
+
+ Map<String, String> placeholderValues = new HashMap<>();
+ List<String> componentHostnames = new ArrayList<>();
+ String hostNamePlaceholder = null;
+
+ ServiceURLPropertyConfig.URLPattern pattern = config.getURLPattern(serviceName);
+ if (pattern != null) {
+ for (String propertyName : pattern.getPlaceholders()) {
+ ServiceURLPropertyConfig.Property configProperty = config.getConfigProperty(serviceName, propertyName);
+
+ String propertyValue = null;
+ String propertyType = configProperty.getType();
+ if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+ log.lookingUpServiceConfigProperty(configProperty.getService(), configProperty.getServiceConfig(), configProperty.getValue());
+ AmbariCluster.ServiceConfiguration svcConfig =
+ cluster.getServiceConfiguration(configProperty.getService(), configProperty.getServiceConfig());
+ if (svcConfig != null) {
+ propertyValue = svcConfig.getProperties().get(configProperty.getValue());
+ }
+ } else if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+ String compName = configProperty.getComponent();
+ if (compName != null) {
+ AmbariComponent component = cluster.getComponent(compName);
+ if (component != null) {
+ if (ServiceURLPropertyConfig.Property.PROP_COMP_HOSTNAME.equals(configProperty.getValue())) {
+ log.lookingUpComponentHosts(compName);
+ componentHostnames.addAll(component.getHostNames());
+ hostNamePlaceholder = propertyName; // Remember the host name placeholder
+ } else {
+ log.lookingUpComponentConfigProperty(compName, configProperty.getValue());
+ propertyValue = component.getConfigProperty(configProperty.getValue());
+ }
+ }
+ }
+ } else { // Derived property
+ log.handlingDerivedProperty(serviceName, configProperty.getType(), configProperty.getName());
+ ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, configProperty.getName());
+ propertyValue = p.getValue();
+ if (propertyValue == null) {
+ if (p.getConditionHandler() != null) {
+ propertyValue = p.getConditionHandler().evaluate(config, cluster);
+ }
+ }
+ }
+
+ log.determinedPropertyValue(configProperty.getName(), propertyValue);
+ placeholderValues.put(configProperty.getName(), propertyValue);
+ }
+
+ // For patterns with a placeholder value for the hostname (e.g., multiple URL scenarios)
+ if (!componentHostnames.isEmpty()) {
+ for (String componentHostname : componentHostnames) {
+ String url = pattern.get().replace("{" + hostNamePlaceholder + "}", componentHostname);
+ urls.add(createURL(url, placeholderValues));
+ }
+ } else { // Single URL result case
+ urls.add(createURL(pattern.get(), placeholderValues));
+ }
+ }
+
+ return urls;
+ }
+
+ private String createURL(String pattern, Map<String, String> placeholderValues) {
+ String url = null;
+ if (pattern != null) {
+ url = pattern;
+ for (String placeHolder : placeholderValues.keySet()) {
+ String value = placeholderValues.get(placeHolder);
+ if (value != null) {
+ url = url.replace("{" + placeHolder + "}", value);
+ }
+ }
+ }
+ return url;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ConditionalValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ConditionalValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ConditionalValueHandler.java
new file mode 100644
index 0000000..168fce6
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ConditionalValueHandler.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+
+interface ConditionalValueHandler {
+
+ String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster);
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/PropertyEqualsHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
new file mode 100644
index 0000000..4044d56
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/PropertyEqualsHandler.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+
+class PropertyEqualsHandler implements ConditionalValueHandler {
+
+ private String serviceName = null;
+ private String propertyName = null;
+ private String propertyValue = null;
+ private ConditionalValueHandler affirmativeResult = null;
+ private ConditionalValueHandler negativeResult = null;
+
+ PropertyEqualsHandler(String serviceName,
+ String propertyName,
+ String propertyValue,
+ ConditionalValueHandler affirmativeResult,
+ ConditionalValueHandler negativeResult) {
+ this.serviceName = serviceName;
+ this.propertyName = propertyName;
+ this.propertyValue = propertyValue;
+ this.affirmativeResult = affirmativeResult;
+ this.negativeResult = negativeResult;
+ }
+
+ @Override
+ public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+ String result = null;
+
+ ServiceURLPropertyConfig.Property p = config.getConfigProperty(serviceName, propertyName);
+ if (p != null) {
+ String value = getActualPropertyValue(cluster, p);
+ if (propertyValue.equals(value)) {
+ result = affirmativeResult.evaluate(config, cluster);
+ } else if (negativeResult != null) {
+ result = negativeResult.evaluate(config, cluster);
+ }
+
+ // Check if the result is a reference to a local derived property
+ ServiceURLPropertyConfig.Property derived = config.getConfigProperty(serviceName, result);
+ if (derived != null) {
+ result = getActualPropertyValue(cluster, derived);
+ }
+ }
+
+ return result;
+ }
+
+ private String getActualPropertyValue(AmbariCluster cluster, ServiceURLPropertyConfig.Property property) {
+ String value = null;
+ String propertyType = property.getType();
+ if (ServiceURLPropertyConfig.Property.TYPE_COMPONENT.equals(propertyType)) {
+ AmbariComponent component = cluster.getComponent(property.getComponent());
+ if (component != null) {
+ value = component.getConfigProperty(property.getValue());
+ }
+ } else if (ServiceURLPropertyConfig.Property.TYPE_SERVICE.equals(propertyType)) {
+ value = cluster.getServiceConfiguration(property.getService(), property.getServiceConfig()).getProperties().get(property.getValue());
+ }
+ return value;
+ }
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
new file mode 100644
index 0000000..ed07873
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/ServiceURLPropertyConfig.java
@@ -0,0 +1,324 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.util.XmlUtils;
+import org.w3c.dom.Document;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Service URL pattern mapping configuration model.
+ */
+/**
+ * Model of the service-URL-pattern to Ambari-property mapping, parsed from an XML
+ * mapping descriptor (by default, ambari-service-discovery-url-mappings.xml on the classpath).
+ */
+class ServiceURLPropertyConfig {
+
+ private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
+
+ private static final String ATTR_NAME = "name";
+
+ // Compiled XPath expressions for navigating the mapping descriptor document
+ private static XPathExpression SERVICE_URL_PATTERN_MAPPINGS;
+ private static XPathExpression URL_PATTERN;
+ private static XPathExpression PROPERTIES;
+ static {
+ XPath xpath = XPathFactory.newInstance().newXPath();
+ try {
+ SERVICE_URL_PATTERN_MAPPINGS = xpath.compile("/service-discovery-url-mappings/service");
+ URL_PATTERN = xpath.compile("url-pattern/text()");
+ PROPERTIES = xpath.compile("properties/property");
+ } catch (XPathExpressionException e) {
+ // NOTE(review): a compile failure leaves the expressions null and only prints the
+ // stack trace; consider reporting via 'log' and failing fast instead.
+ e.printStackTrace();
+ }
+ }
+
+ private static final String DEFAULT_SERVICE_URL_MAPPINGS = "ambari-service-discovery-url-mappings.xml";
+
+ // Service name -> URL pattern declared for that service
+ private Map<String, URLPattern> urlPatterns = new HashMap<>();
+
+ // Service name -> (property name -> property definition)
+ private Map<String, Map<String, Property>> properties = new HashMap<>();
+
+
+ /**
+ * The default service URL pattern to property mapping configuration will be used.
+ */
+ ServiceURLPropertyConfig() {
+ this(ServiceURLPropertyConfig.class.getClassLoader().getResourceAsStream(DEFAULT_SERVICE_URL_MAPPINGS));
+ }
+
+ /**
+ * Load the service URL pattern to property mapping configuration from the specified file.
+ *
+ * @param mappingConfigurationFile An XML mapping configuration file.
+ * @throws Exception if the file cannot be opened for reading.
+ */
+ ServiceURLPropertyConfig(File mappingConfigurationFile) throws Exception {
+ this(new FileInputStream(mappingConfigurationFile));
+ }
+
+ /**
+ * Load the mapping configuration from the specified stream. The stream is always
+ * closed before this constructor returns; parse failures are logged, leaving the
+ * model (possibly partially) empty.
+ *
+ * @param source An InputStream for the XML content
+ */
+ ServiceURLPropertyConfig(InputStream source) {
+ // Parse the XML, and build the model
+ try {
+ Document doc = XmlUtils.readXml(source);
+
+ NodeList serviceNodes =
+ (NodeList) SERVICE_URL_PATTERN_MAPPINGS.evaluate(doc, XPathConstants.NODESET);
+ for (int i=0; i < serviceNodes.getLength(); i++) {
+ Node serviceNode = serviceNodes.item(i);
+ String serviceName = serviceNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+ properties.put(serviceName, new HashMap<String, Property>());
+
+ // Optional url-pattern element for this service
+ Node urlPatternNode = (Node) URL_PATTERN.evaluate(serviceNode, XPathConstants.NODE);
+ if (urlPatternNode != null) {
+ urlPatterns.put(serviceName, new URLPattern(urlPatternNode.getNodeValue()));
+ }
+
+ NodeList propertiesNode = (NodeList) PROPERTIES.evaluate(serviceNode, XPathConstants.NODESET);
+ if (propertiesNode != null) {
+ processProperties(serviceName, propertiesNode);
+ }
+ }
+ } catch (Exception e) {
+ log.failedToLoadServiceDiscoveryConfiguration(e);
+ } finally {
+ try {
+ source.close();
+ } catch (IOException e) {
+ // Ignore
+ }
+ }
+ }
+
+ // Parse each <property> child of a service node into the model
+ private void processProperties(String serviceName, NodeList propertyNodes) {
+ for (int i = 0; i < propertyNodes.getLength(); i++) {
+ Property p = Property.createProperty(serviceName, propertyNodes.item(i));
+ properties.get(serviceName).put(p.getName(), p);
+ }
+ }
+
+ URLPattern getURLPattern(String service) {
+ return urlPatterns.get(service);
+ }
+
+ // NOTE(review): throws NullPointerException if 'service' is unknown to the mapping —
+ // TODO confirm callers only pass services present in the descriptor.
+ Property getConfigProperty(String service, String property) {
+ return properties.get(service).get(property);
+ }
+
+ /**
+ * A URL pattern plus the {placeholder} names it contains, in order of appearance.
+ */
+ static class URLPattern {
+ String pattern;
+ List<String> placeholders = new ArrayList<>();
+
+ URLPattern(String pattern) {
+ this.pattern = pattern;
+
+ // Collect every "{...}" placeholder name from the pattern
+ final Pattern regex = Pattern.compile("\\{(.*?)}", Pattern.DOTALL);
+ final Matcher matcher = regex.matcher(pattern);
+ while( matcher.find() ){
+ placeholders.add(matcher.group(1));
+ }
+ }
+
+ String get() {return pattern; }
+ List<String> getPlaceholders() {
+ return placeholders;
+ }
+ }
+
+ /**
+ * A single mapped property: its type (SERVICE, COMPONENT or DERIVED), the location
+ * its value comes from, and an optional conditional handler for DERIVED properties.
+ */
+ static class Property {
+ static final String TYPE_SERVICE = "SERVICE";
+ static final String TYPE_COMPONENT = "COMPONENT";
+ static final String TYPE_DERIVED = "DERIVED";
+
+ // Sentinel value meaning "use the component's host name(s)"
+ static final String PROP_COMP_HOSTNAME = "component.host.name";
+
+ static final String ATTR_NAME = "name";
+ static final String ATTR_PROPERTY = "property";
+ static final String ATTR_VALUE = "value";
+
+ // Compiled XPath expressions for the elements of a <property> node
+ static XPathExpression HOSTNAME;
+ static XPathExpression SERVICE_CONFIG;
+ static XPathExpression COMPONENT;
+ static XPathExpression CONFIG_PROPERTY;
+ static XPathExpression IF;
+ static XPathExpression THEN;
+ static XPathExpression ELSE;
+ static XPathExpression TEXT;
+ static {
+ XPath xpath = XPathFactory.newInstance().newXPath();
+ try {
+ HOSTNAME = xpath.compile("hostname");
+ SERVICE_CONFIG = xpath.compile("service-config");
+ COMPONENT = xpath.compile("component");
+ CONFIG_PROPERTY = xpath.compile("config-property");
+ IF = xpath.compile("if");
+ THEN = xpath.compile("then");
+ ELSE = xpath.compile("else");
+ TEXT = xpath.compile("text()");
+ } catch (XPathExpressionException e) {
+ // NOTE(review): same concern as the outer class — failure here only prints the trace.
+ e.printStackTrace();
+ }
+ }
+
+
+ String type;
+ String name;
+ String component;
+ String service;
+ String serviceConfig;
+ String value;
+ ConditionalValueHandler conditionHandler = null;
+
+ private Property(String type,
+ String propertyName,
+ String component,
+ String service,
+ String configType,
+ String value,
+ ConditionalValueHandler pch) {
+ this.type = type;
+ this.name = propertyName;
+ this.service = service;
+ this.component = component;
+ this.serviceConfig = configType;
+ this.value = value;
+ conditionHandler = pch;
+ }
+
+ /**
+ * Build a Property model object from a <property> node of the mapping descriptor.
+ *
+ * @param serviceName The service to which the property belongs.
+ * @param propertyNode The DOM node for the property element.
+ * @return The property representation; never null.
+ */
+ static Property createProperty(String serviceName, Node propertyNode) {
+ String propertyName = propertyNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+ String propertyType = null;
+ String serviceType = null;
+ String configType = null;
+ String componentType = null;
+ String value = null;
+ ConditionalValueHandler pch = null;
+
+ try {
+ // A <hostname> element means the value is the component host name sentinel
+ Node hostNameNode = (Node) HOSTNAME.evaluate(propertyNode, XPathConstants.NODE);
+ if (hostNameNode != null) {
+ value = PROP_COMP_HOSTNAME;
+ }
+
+ // Check for a service-config node
+ Node scNode = (Node) SERVICE_CONFIG.evaluate(propertyNode, XPathConstants.NODE);
+ if (scNode != null) {
+ // Service config property
+ propertyType = Property.TYPE_SERVICE;
+ serviceType = scNode.getAttributes().getNamedItem(ATTR_NAME).getNodeValue();
+ Node scTextNode = (Node) TEXT.evaluate(scNode, XPathConstants.NODE);
+ configType = scTextNode.getNodeValue();
+ } else { // If not service-config node, check for a component config node
+ Node cNode = (Node) COMPONENT.evaluate(propertyNode, XPathConstants.NODE);
+ if (cNode != null) {
+ // Component config property
+ propertyType = Property.TYPE_COMPONENT;
+ componentType = cNode.getFirstChild().getNodeValue();
+ Node cTextNode = (Node) TEXT.evaluate(cNode, XPathConstants.NODE);
+ configType = cTextNode.getNodeValue();
+ // NOTE(review): this overwrites the componentType assigned two lines above with
+ // the same text-node value used for configType — looks like a copy/paste slip;
+ // verify which value componentType is meant to hold.
+ componentType = cTextNode.getNodeValue();
+ }
+ }
+
+ // Check for a config property node
+ Node cpNode = (Node) CONFIG_PROPERTY.evaluate(propertyNode, XPathConstants.NODE);
+ if (cpNode != null) {
+ // Check for a condition element
+ Node ifNode = (Node) IF.evaluate(cpNode, XPathConstants.NODE);
+ if (ifNode != null) {
+ propertyType = TYPE_DERIVED;
+ pch = getConditionHandler(serviceName, ifNode);
+ } else {
+ // Unconditional literal value
+ Node cpTextNode = (Node) TEXT.evaluate(cpNode, XPathConstants.NODE);
+ value = cpTextNode.getNodeValue();
+ }
+ }
+ } catch (Exception e) {
+ // NOTE(review): swallowed after printing; the property is still returned, possibly
+ // with null fields — consider logging via the message bundle.
+ e.printStackTrace();
+ }
+
+ // Create and return the property representation
+ return new Property(propertyType, propertyName, componentType, serviceType, configType, value, pch);
+ }
+
+ /**
+ * Recursively build the conditional-value handler tree for an <if> element,
+ * including nested <if> elements under <then> and <else>.
+ */
+ private static ConditionalValueHandler getConditionHandler(String serviceName, Node ifNode) throws Exception {
+ ConditionalValueHandler result = null;
+
+ if (ifNode != null) {
+ NamedNodeMap attrs = ifNode.getAttributes();
+ String comparisonPropName = attrs.getNamedItem(ATTR_PROPERTY).getNodeValue();
+ String comparisonValue = attrs.getNamedItem(ATTR_VALUE).getNodeValue();
+
+ // <then> branch: either a nested <if> or a simple literal value
+ ConditionalValueHandler affirmativeResult = null;
+ Node thenNode = (Node) THEN.evaluate(ifNode, XPathConstants.NODE);
+ if (thenNode != null) {
+ Node subIfNode = (Node) IF.evaluate(thenNode, XPathConstants.NODE);
+ if (subIfNode != null) {
+ affirmativeResult = getConditionHandler(serviceName, subIfNode);
+ } else {
+ affirmativeResult = new SimpleValueHandler(thenNode.getFirstChild().getNodeValue());
+ }
+ }
+
+ // <else> branch: same structure as <then>
+ ConditionalValueHandler negativeResult = null;
+ Node elseNode = (Node) ELSE.evaluate(ifNode, XPathConstants.NODE);
+ if (elseNode != null) {
+ Node subIfNode = (Node) IF.evaluate(elseNode, XPathConstants.NODE);
+ if (subIfNode != null) {
+ negativeResult = getConditionHandler(serviceName, subIfNode);
+ } else {
+ negativeResult = new SimpleValueHandler(elseNode.getFirstChild().getNodeValue());
+ }
+ }
+
+ result = new PropertyEqualsHandler(serviceName,
+ comparisonPropName,
+ comparisonValue,
+ affirmativeResult,
+ negativeResult);
+ }
+
+ return result;
+ }
+
+ String getType() { return type; }
+ String getName() { return name; }
+ String getComponent() { return component; }
+ String getService() { return service; }
+ String getServiceConfig() { return serviceConfig; }
+ String getValue() {
+ return value;
+ }
+ ConditionalValueHandler getConditionHandler() { return conditionHandler; }
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7d0bff16/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/SimpleValueHandler.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/SimpleValueHandler.java b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/SimpleValueHandler.java
new file mode 100644
index 0000000..d1678d8
--- /dev/null
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/SimpleValueHandler.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+
+/**
+ * ConditionalValueHandler that ignores the configuration and cluster context and
+ * always yields a fixed literal value.
+ */
+class SimpleValueHandler implements ConditionalValueHandler {
+ // The fixed value to return from evaluate()
+ private String value;
+
+ SimpleValueHandler(String value) {
+ this.value = value;
+ }
+
+ @Override
+ public String evaluate(ServiceURLPropertyConfig config, AmbariCluster cluster) {
+ return value;
+ }
+}
+
[13/23] knox git commit: KNOX-1079 - Regression: proxy for Atlas
fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)
Posted by mo...@apache.org.
KNOX-1079 - Regression: proxy for Atlas fails with j_spring_security_check during login (Madhan Neethiraj via lmccay)
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/0719da37
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/0719da37
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/0719da37
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 0719da376065950d8786576cf92f71367d9aba8d
Parents: 2d236e7
Author: Larry McCay <lm...@hortonworks.com>
Authored: Thu Oct 12 15:03:16 2017 -0400
Committer: Larry McCay <lm...@hortonworks.com>
Committed: Thu Oct 12 15:03:16 2017 -0400
----------------------------------------------------------------------
.../src/main/resources/services/atlas/0.8.0/rewrite.xml | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/0719da37/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
----------------------------------------------------------------------
diff --git a/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml b/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
index 9db2348..84b4edf 100644
--- a/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
+++ b/gateway-service-definitions/src/main/resources/services/atlas/0.8.0/rewrite.xml
@@ -66,6 +66,10 @@
<filter name="ATLAS/atlas/outbound/links">
+ <content type="*/x-javascript">
+ <apply path="j_spring_security_check" rule="ATLAS/atlas/outbound/extrapath"/>
+ <apply path="index.html" rule="ATLAS/atlas/outbound/index"/>
+ </content>
<content type="application/javascript">
<apply path="j_spring_security_check" rule="ATLAS/atlas/outbound/extrapath"/>
<apply path="index.html" rule="ATLAS/atlas/outbound/index"/>
@@ -80,4 +84,4 @@
</content>
</filter>
-</rules>
\ No newline at end of file
+</rules>
[19/23] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
Merge branch 'master' into KNOX-998-Package_Restructuring
# Conflicts:
# gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
# gateway-discovery-ambari/src/main/resources/ambari-service-discovery-component-config-mapping.properties
# gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
# gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
# gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
# gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
# gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
# gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
# gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
# gateway-server/src/test/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandlerTest.java
# gateway-service-knoxtoken/src/test/java/org/apache/knox/gateway/service/knoxtoken/TokenServiceResourceTest.java
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/8affbc02
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/8affbc02
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/8affbc02
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 8affbc0226fb2e587bdadaac9270b071d52b8062
Parents: 557d569 92b1505
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 16 10:06:25 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 16 10:06:25 2017 -0400
----------------------------------------------------------------------
.../ambari/AmbariDynamicServiceURLCreator.java | 151 ++++
.../ambari/ConditionalValueHandler.java | 24 +
.../discovery/ambari/PropertyEqualsHandler.java | 76 ++
.../ambari/ServiceURLPropertyConfig.java | 324 +++++++
.../discovery/ambari/SimpleValueHandler.java | 32 +
.../discovery/ambari/AmbariCluster.java | 7 +-
.../discovery/ambari/AmbariComponent.java | 27 +-
.../ambari/AmbariServiceDiscovery.java | 58 +-
.../ambari/AmbariServiceDiscoveryMessages.java | 64 +-
.../ambari/AmbariServiceURLCreator.java | 184 ----
...iscovery-component-config-mapping.properties | 36 +
.../ambari-service-discovery-url-mappings.xml | 398 +++++++++
.../AmbariDynamicServiceURLCreatorTest.java | 876 +++++++++++++++++++
.../ambari/AmbariServiceDiscoveryTest.java | 4 +-
.../jwt/filter/AbstractJWTFilter.java | 2 +-
.../federation/AbstractJWTFilterTest.java | 31 +
gateway-provider-security-picketlink/pom.xml | 76 --
.../gateway/picketlink/PicketlinkMessages.java | 40 -
.../picketlink/deploy/PicketlinkConf.java | 194 ----
...PicketlinkFederationProviderContributor.java | 132 ---
.../filter/CaptureOriginalURLFilter.java | 89 --
.../filter/PicketlinkIdentityAdapter.java | 102 ---
.../knox/gateway/picketlink/PicketlinkTest.java | 29 -
gateway-release/pom.xml | 4 -
.../gateway/websockets/ProxyInboundClient.java | 107 +++
.../impl/DefaultServiceRegistryService.java | 50 +-
.../security/impl/DefaultAliasService.java | 12 +-
.../simple/SimpleDescriptorHandler.java | 69 +-
.../simple/SimpleDescriptorMessages.java | 8 +-
.../websockets/GatewayWebsocketHandler.java | 41 +-
.../websockets/ProxyWebSocketAdapter.java | 19 +-
.../websockets/ProxyInboundClientTest.java | 374 ++++++++
.../simple/SimpleDescriptorHandlerTest.java | 181 +++-
.../services/ambariui/2.2.0/service.xml | 5 +
.../resources/services/atlas/0.8.0/rewrite.xml | 6 +-
.../gateway/service/knoxsso/WebSSOResource.java | 2 +-
.../service/knoxsso/WebSSOResourceTest.java | 58 ++
.../service/knoxtoken/TokenResource.java | 37 +-
.../knoxtoken/TokenServiceResourceTest.java | 203 +++++
gateway-shell-release/pom.xml | 4 +
.../knox/gateway/util/urltemplate/Parser.java | 10 +-
.../gateway/util/urltemplate/ParserTest.java | 17 +
pom.xml | 44 +-
43 files changed, 3217 insertions(+), 990 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
index fa9d710,0000000..d65bff7
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariCluster.java
@@@ -1,114 -1,0 +1,115 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Model of a single Ambari-managed cluster: its components, its service configurations,
+ * and a URL creator used to derive service URLs from that state.
+ */
+class AmbariCluster implements ServiceDiscovery.Cluster {
+
+ private String name = null;
+
- private AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
++ private AmbariDynamicServiceURLCreator urlCreator;
+
+ // Service name -> (configuration type -> configuration)
+ private Map<String, Map<String, ServiceConfiguration>> serviceConfigurations = new HashMap<>();
+
+ // Component name -> component model
+ private Map<String, AmbariComponent> components = null;
+
+
+ AmbariCluster(String name) {
+ this.name = name;
- components = new HashMap<String, AmbariComponent>();
++ components = new HashMap<>();
++ urlCreator = new AmbariDynamicServiceURLCreator(this);
+ }
+
+ void addServiceConfiguration(String serviceName, String configurationType, ServiceConfiguration serviceConfig) {
+ if (!serviceConfigurations.keySet().contains(serviceName)) {
+ serviceConfigurations.put(serviceName, new HashMap<String, ServiceConfiguration>());
+ }
+ serviceConfigurations.get(serviceName).put(configurationType, serviceConfig);
+ }
+
+
+ void addComponent(AmbariComponent component) {
+ components.put(component.getName(), component);
+ }
+
+
+ // Returns null when the service or configuration type is unknown
+ ServiceConfiguration getServiceConfiguration(String serviceName, String configurationType) {
+ ServiceConfiguration sc = null;
+ Map<String, ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+ if (configs != null) {
+ sc = configs.get(configurationType);
+ }
+ return sc;
+ }
+
+
+ Map<String, AmbariComponent> getComponents() {
+ return components;
+ }
+
+
+ AmbariComponent getComponent(String name) {
+ return components.get(name);
+ }
+
+
+ @Override
+ public String getName() {
+ return name;
+ }
+
+
+ @Override
+ public List<String> getServiceURLs(String serviceName) {
+ List<String> urls = new ArrayList<>();
- urls.addAll(urlCreator.create(this, serviceName));
++ urls.addAll(urlCreator.create(serviceName));
+ return urls;
+ }
+
+
+ /**
+ * A typed, versioned bag of configuration properties for a service.
+ */
+ static class ServiceConfiguration {
+
+ private String type;
+ private String version;
+ private Map<String, String> props;
+
+ ServiceConfiguration(String type, String version, Map<String, String> properties) {
+ this.type = type;
+ this.version = version;
+ this.props = properties;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public Map<String, String> getProperties() {
+ return props;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
index 4750e7e,0000000..c8e7c6d
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariComponent.java
@@@ -1,76 -1,0 +1,85 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
++import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Model of a single Ambari service component: its identity, the hosts it runs on,
+ * and its configuration properties.
+ */
+class AmbariComponent {
+
+ private String clusterName = null;
+ private String serviceName = null;
+ private String name = null;
+ private String version = null;
+
- private List<String> hostNames = null;
++ private List<String> hostNames = new ArrayList<>();
+
+ private Map<String, String> properties = null;
+
+ AmbariComponent(String name,
+ String version,
+ String cluster,
+ String service,
+ List<String> hostNames,
+ Map<String, String> properties) {
+ this.name = name;
+ this.serviceName = service;
+ this.clusterName = cluster;
+ this.version = version;
- this.hostNames = hostNames;
+ this.properties = properties;
++
++ if (hostNames != null) {
++ // Add the hostnames individually to prevent adding any null values
++ for (String hostName : hostNames) {
++ if (hostName != null) {
++ this.hostNames.add(hostName);
++ }
++ }
++ }
+ }
+
- public String getVersion() {
++ String getVersion() {
+ return version;
+ }
+
- public String getName() {
++ String getName() {
+ return name;
+ }
+
- public String getServiceName() {
++ String getServiceName() {
+ return serviceName;
+ }
+
- public String getClusterName() {
++ String getClusterName() {
+ return clusterName;
+ }
+
+ // Never null: initialized empty and populated only with non-null host names
- public List<String> getHostNames() {
++ List<String> getHostNames() {
+ return hostNames;
+ }
+
- public Map<String, String> getConfigProperties() {
++ Map<String, String> getConfigProperties() {
+ return properties;
+ }
+
+ // NOTE(review): NPEs if 'properties' was constructed null — TODO confirm callers
- public String getConfigProperty(String propertyName) {
++ String getConfigProperty(String propertyName) {
+ return properties.get(propertyName);
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
index da03564,0000000..70af903
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscovery.java
@@@ -1,291 -1,0 +1,305 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
++import java.io.IOException;
++import java.util.ArrayList;
++import java.util.HashMap;
++import java.util.List;
++import java.util.Map;
++import java.util.Properties;
++
+import net.minidev.json.JSONArray;
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.config.ConfigurationException;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+import org.apache.knox.gateway.topology.discovery.GatewayService;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+
- import java.io.IOException;
- import java.util.*;
-
+
+class AmbariServiceDiscovery implements ServiceDiscovery {
+
+ static final String TYPE = "AMBARI";
+
+ static final String AMBARI_CLUSTERS_URI = "/api/v1/clusters";
+
+ static final String AMBARI_HOSTROLES_URI =
+ AMBARI_CLUSTERS_URI + "/%s/services?fields=components/host_components/HostRoles";
+
+ static final String AMBARI_SERVICECONFIGS_URI =
+ AMBARI_CLUSTERS_URI + "/%s/configurations/service_config_versions?is_current=true";
+
++ private static final String COMPONENT_CONFIG_MAPPING_FILE =
++ "ambari-service-discovery-component-config-mapping.properties";
++
++ private static final AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
++
+ // Map of component names to service configuration types
+ private static Map<String, String> componentServiceConfigs = new HashMap<>();
+ static {
- componentServiceConfigs.put("NAMENODE", "hdfs-site");
- componentServiceConfigs.put("RESOURCEMANAGER", "yarn-site");
- componentServiceConfigs.put("OOZIE_SERVER", "oozie-site");
- componentServiceConfigs.put("HIVE_SERVER", "hive-site");
- componentServiceConfigs.put("WEBHCAT_SERVER", "webhcat-site");
- componentServiceConfigs.put("HBASE_MASTER", "hbase-site");
- } // TODO: Are there other service components, for which the endpoints can be discovered via Ambari?
++ try {
++ Properties configMapping = new Properties();
++ configMapping.load(AmbariServiceDiscovery.class.getClassLoader().getResourceAsStream(COMPONENT_CONFIG_MAPPING_FILE));
++ for (String componentName : configMapping.stringPropertyNames()) {
++ componentServiceConfigs.put(componentName, configMapping.getProperty(componentName));
++ }
++ } catch (Exception e) {
++ log.failedToLoadServiceDiscoveryConfiguration(COMPONENT_CONFIG_MAPPING_FILE, e);
++ }
++ }
+
+ private static final String DEFAULT_USER_ALIAS = "ambari.discovery.user";
+ private static final String DEFAULT_PWD_ALIAS = "ambari.discovery.password";
+
- private static AmbariServiceURLCreator urlCreator = new AmbariServiceURLCreator();
-
- private AmbariServiceDiscoveryMessages log = MessagesFactory.get(AmbariServiceDiscoveryMessages.class);
-
+ @GatewayService
+ private AliasService aliasService;
+
+ private CloseableHttpClient httpClient = null;
+
- private Map<String, Map<String, String>> serviceConfiguration = new HashMap<>();
-
+
+ AmbariServiceDiscovery() {
+ httpClient = org.apache.http.impl.client.HttpClients.createDefault();
+ }
+
+
+ @Override
+ public String getType() {
+ return TYPE;
+ }
+
+
+ @Override
+ public Map<String, Cluster> discover(ServiceDiscoveryConfig config) {
+ Map<String, Cluster> clusters = new HashMap<String, Cluster>();
+
+ String discoveryAddress = config.getAddress();
+
+ // Invoke Ambari REST API to discover the available clusters
+ String clustersDiscoveryURL = String.format("%s" + AMBARI_CLUSTERS_URI, discoveryAddress);
+
+ JSONObject json = invokeREST(clustersDiscoveryURL, config.getUser(), config.getPasswordAlias());
+
+ // Parse the cluster names from the response, and perform the cluster discovery
+ JSONArray clusterItems = (JSONArray) json.get("items");
+ for (Object clusterItem : clusterItems) {
+ String clusterName = (String) ((JSONObject)((JSONObject) clusterItem).get("Clusters")).get("cluster_name");
+ try {
+ Cluster c = discover(config, clusterName);
+ clusters.put(clusterName, c);
+ } catch (Exception e) {
+ log.clusterDiscoveryError(clusterName, e);
+ }
+ }
+
+ return clusters;
+ }
+
+
+ @Override
+ public Cluster discover(ServiceDiscoveryConfig config, String clusterName) {
+ AmbariCluster cluster = new AmbariCluster(clusterName);
+
+ Map<String, String> serviceComponents = new HashMap<>();
+
+ String discoveryAddress = config.getAddress();
+ String discoveryUser = config.getUser();
+ String discoveryPwdAlias = config.getPasswordAlias();
+
+ Map<String, List<String>> componentHostNames = new HashMap<>();
+ String hostRolesURL = String.format("%s" + AMBARI_HOSTROLES_URI, discoveryAddress, clusterName);
+ JSONObject hostRolesJSON = invokeREST(hostRolesURL, discoveryUser, discoveryPwdAlias);
+ if (hostRolesJSON != null) {
+ // Process the host roles JSON
+ JSONArray items = (JSONArray) hostRolesJSON.get("items");
+ for (Object obj : items) {
+ JSONArray components = (JSONArray) ((JSONObject) obj).get("components");
+ for (Object component : components) {
+ JSONArray hostComponents = (JSONArray) ((JSONObject) component).get("host_components");
+ for (Object hostComponent : hostComponents) {
+ JSONObject hostRoles = (JSONObject) ((JSONObject) hostComponent).get("HostRoles");
+ String serviceName = (String) hostRoles.get("service_name");
+ String componentName = (String) hostRoles.get("component_name");
+
+ serviceComponents.put(componentName, serviceName);
+
- // String hostName = (String) hostRoles.get("host_name");
- String hostName = (String) hostRoles.get("public_host_name"); // Assuming public host name is most applicable
- log.discoveredServiceHost(serviceName, hostName);
- if (!componentHostNames.containsKey(componentName)) {
- componentHostNames.put(componentName, new ArrayList<String>());
++ // Assuming public host name is more applicable than host_name
++ String hostName = (String) hostRoles.get("public_host_name");
++ if (hostName == null) {
++ // Some (even slightly) older versions of Ambari/HDP do not return public_host_name,
++ // so fall back to host_name in those cases.
++ hostName = (String) hostRoles.get("host_name");
++ }
++
++ if (hostName != null) {
++ log.discoveredServiceHost(serviceName, hostName);
++ if (!componentHostNames.containsKey(componentName)) {
++ componentHostNames.put(componentName, new ArrayList<String>());
++ }
++ componentHostNames.get(componentName).add(hostName);
+ }
- componentHostNames.get(componentName).add(hostName);
+ }
+ }
+ }
+ }
+
+ Map<String, Map<String, AmbariCluster.ServiceConfiguration>> serviceConfigurations =
+ new HashMap<String, Map<String, AmbariCluster.ServiceConfiguration>>();
+ String serviceConfigsURL = String.format("%s" + AMBARI_SERVICECONFIGS_URI, discoveryAddress, clusterName);
+ JSONObject serviceConfigsJSON = invokeREST(serviceConfigsURL, discoveryUser, discoveryPwdAlias);
+ if (serviceConfigsJSON != null) {
+ // Process the service configurations
+ JSONArray serviceConfigs = (JSONArray) serviceConfigsJSON.get("items");
+ for (Object serviceConfig : serviceConfigs) {
+ String serviceName = (String) ((JSONObject) serviceConfig).get("service_name");
+ JSONArray configurations = (JSONArray) ((JSONObject) serviceConfig).get("configurations");
+ for (Object configuration : configurations) {
+ String configType = (String) ((JSONObject) configuration).get("type");
+ String configVersion = String.valueOf(((JSONObject) configuration).get("version"));
+
+ Map<String, String> configProps = new HashMap<String, String>();
+ JSONObject configProperties = (JSONObject) ((JSONObject) configuration).get("properties");
+ for (String propertyName : configProperties.keySet()) {
+ configProps.put(propertyName, String.valueOf(((JSONObject) configProperties).get(propertyName)));
+ }
+ if (!serviceConfigurations.containsKey(serviceName)) {
+ serviceConfigurations.put(serviceName, new HashMap<String, AmbariCluster.ServiceConfiguration>());
+ }
+ serviceConfigurations.get(serviceName).put(configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+ cluster.addServiceConfiguration(serviceName, configType, new AmbariCluster.ServiceConfiguration(configType, configVersion, configProps));
+ }
+ }
+ }
+
+ // Construct the AmbariCluster model
+ for (String componentName : serviceComponents.keySet()) {
+ String serviceName = serviceComponents.get(componentName);
+ List<String> hostNames = componentHostNames.get(componentName);
+
+ Map<String, AmbariCluster.ServiceConfiguration> configs = serviceConfigurations.get(serviceName);
+ String configType = componentServiceConfigs.get(componentName);
+ if (configType != null) {
+ AmbariCluster.ServiceConfiguration svcConfig = configs.get(configType);
+ AmbariComponent c = new AmbariComponent(componentName,
+ svcConfig.getVersion(),
+ clusterName,
+ serviceName,
+ hostNames,
+ svcConfig.getProperties());
+ cluster.addComponent(c);
+ }
+ }
+
+ return cluster;
+ }
+
+
+ protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+ JSONObject result = null;
+
+ CloseableHttpResponse response = null;
+ try {
+ HttpGet request = new HttpGet(url);
+
+ // If no configured username, then use default username alias
+ String password = null;
+ if (username == null) {
+ if (aliasService != null) {
+ try {
+ char[] defaultUser = aliasService.getPasswordFromAliasForGateway(DEFAULT_USER_ALIAS);
+ if (defaultUser != null) {
+ username = new String(defaultUser);
+ }
+ } catch (AliasServiceException e) {
+ log.aliasServiceUserError(DEFAULT_USER_ALIAS, e.getLocalizedMessage());
+ }
+ }
+
+ // If username is still null
+ if (username == null) {
+ log.aliasServiceUserNotFound();
+ throw new ConfigurationException("No username is configured for Ambari service discovery.");
+ }
+ }
+
+ if (aliasService != null) {
+ // If no password alias is configured, then try the default alias
+ if (passwordAlias == null) {
+ passwordAlias = DEFAULT_PWD_ALIAS;
+ }
+ try {
+ char[] pwd = aliasService.getPasswordFromAliasForGateway(passwordAlias);
+ if (pwd != null) {
+ password = new String(pwd);
+ }
+
+ } catch (AliasServiceException e) {
+ log.aliasServicePasswordError(passwordAlias, e.getLocalizedMessage());
+ }
+ }
+
+ // If the password could not be determined
+ if (password == null) {
+ log.aliasServicePasswordNotFound();
+ throw new ConfigurationException("No password is configured for Ambari service discovery.");
+ }
+
+ // Add an auth header if credentials are available
+ String encodedCreds =
+ org.apache.commons.codec.binary.Base64.encodeBase64String((username + ":" + password).getBytes());
+ request.addHeader(new BasicHeader("Authorization", "Basic " + encodedCreds));
+
+ response = httpClient.execute(request);
+
+ if (HttpStatus.SC_OK == response.getStatusLine().getStatusCode()) {
+ HttpEntity entity = response.getEntity();
+ if (entity != null) {
+ result = (JSONObject) JSONValue.parse((EntityUtils.toString(entity)));
+ log.debugJSON(result.toJSONString());
+ } else {
+ log.noJSON(url);
+ }
+ } else {
+ log.unexpectedRestResponseStatusCode(url, response.getStatusLine().getStatusCode());
+ }
+
+ } catch (IOException e) {
+ log.restInvocationError(url, e);
+ } finally {
+ if(response != null) {
+ try {
+ response.close();
+ } catch (IOException e) {
+ // Ignore
+ }
+ }
+ }
+ return result;
+ }
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
index 2a153bb,0000000..d91edef
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryMessages.java
@@@ -1,81 -1,0 +1,121 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
- @Messages(logger="org.apache.gateway.topology.discovery.ambari")
++@Messages(logger="org.apache.hadoop.gateway.topology.discovery.ambari")
+public interface AmbariServiceDiscoveryMessages {
+
+ @Message(level = MessageLevel.ERROR,
- text = "Encountered an error during cluster {0} discovery: {1}")
++ text = "Failed to load service discovery configuration: {0}")
++ void failedToLoadServiceDiscoveryConfiguration(@StackTrace(level = MessageLevel.ERROR) Exception e);
++
++ @Message(level = MessageLevel.ERROR,
++ text = "Failed to load service discovery configuration {0}: {1}")
++ void failedToLoadServiceDiscoveryConfiguration(final String configuration,
++ @StackTrace(level = MessageLevel.ERROR) Exception e);
++
++ @Message(level = MessageLevel.ERROR,
++ text = "Encountered an error during cluster {0} discovery: {1}")
+ void clusterDiscoveryError(final String clusterName,
+ @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+ @Message(level = MessageLevel.DEBUG,
- text = "REST invocation {0} failed: {1}")
++ text = "REST invocation {0} failed: {1}")
+ void restInvocationError(final String url,
+ @StackTrace(level = MessageLevel.ERROR) Exception e);
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "Encountered an error attempting to determine the user for alias {0} : {1}")
++ text = "Encountered an error attempting to determine the user for alias {0} : {1}")
+ void aliasServiceUserError(final String alias, final String error);
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "Encountered an error attempting to determine the password for alias {0} : {1}")
++ text = "Encountered an error attempting to determine the password for alias {0} : {1}")
+ void aliasServicePasswordError(final String alias, final String error);
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "No user configured for Ambari service discovery.")
++ text = "No user configured for Ambari service discovery.")
+ void aliasServiceUserNotFound();
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "No password configured for Ambari service discovery.")
++ text = "No password configured for Ambari service discovery.")
+ void aliasServicePasswordNotFound();
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "Unexpected REST invocation response code for {0} : {1}")
++ text = "Unexpected REST invocation response code for {0} : {1}")
+ void unexpectedRestResponseStatusCode(final String url, int responseStatusCode);
+
+
+ @Message(level = MessageLevel.ERROR,
- text = "REST invocation {0} yielded a response without any JSON.")
++ text = "REST invocation {0} yielded a response without any JSON.")
+ void noJSON(final String url);
+
+
+ @Message(level = MessageLevel.DEBUG,
- text = "REST invocation result: {0}")
++ text = "REST invocation result: {0}")
+ void debugJSON(final String json);
+
++ @Message(level = MessageLevel.DEBUG,
++ text = "Loaded component configuration mappings: {0}")
++ void loadedComponentConfigMappings(final String mappings);
+
- @Message(level = MessageLevel.INFO,
- text = "Discovered: Service: {0}, Host: {1}")
++ @Message(level = MessageLevel.ERROR,
++ text = "Failed to load component configuration property mappings {0}: {1}")
++ void failedToLoadComponentConfigMappings(final String mappings,
++ @StackTrace(level = MessageLevel.ERROR) Exception e);
++
++ @Message(level = MessageLevel.DEBUG,
++ text = "Discovered: Service: {0}, Host: {1}")
+ void discoveredServiceHost(final String serviceName, final String hostName);
+
+
++ @Message(level = MessageLevel.DEBUG,
++ text = "Querying the cluster for the {0} configuration ({1}) property: {2}")
++ void lookingUpServiceConfigProperty(final String serviceName, final String configType, final String propertyName);
++
++
++ @Message(level = MessageLevel.DEBUG,
++ text = "Querying the cluster for the {0} component configuration property: {1}")
++ void lookingUpComponentConfigProperty(final String componentName, final String propertyName);
++
++
++ @Message(level = MessageLevel.DEBUG,
++ text = "Querying the cluster for the {0} component's hosts")
++ void lookingUpComponentHosts(final String componentName);
++
++
++ @Message(level = MessageLevel.DEBUG,
++ text = "Handling a derived service URL mapping property for the {0} service: type = {1}, name = {2}")
++ void handlingDerivedProperty(final String serviceName, final String propertyType, final String propertyName);
++
++
++ @Message(level = MessageLevel.DEBUG,
++ text = "Determined the service URL mapping property {0} value: {1}")
++ void determinedPropertyValue(final String propertyName, final String propertyValue);
+
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
index 302eda7,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
+++ b/gateway-discovery-ambari/src/main/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceURLCreator.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --cc gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index ec8aed2,0000000..21627ad
mode 100644,000000..100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/knox/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@@ -1,856 -1,0 +1,858 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.discovery.ambari;
+
+import net.minidev.json.JSONObject;
+import net.minidev.json.JSONValue;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryConfig;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
- import static org.junit.Assert.*;
++import static org.junit.Assert.assertNotNull;
++import static org.junit.Assert.assertEquals;
++import static org.junit.Assert.assertTrue;
+
+
+/**
+ * Test the Ambari ServiceDiscovery implementation.
+ *
+ * N.B. These tests do NOT verify Ambari API responses. They DO validate the Ambari ServiceDiscovery implementation's
+ * treatment of the responses as they were observed at the time the tests are developed.
+ */
+public class AmbariServiceDiscoveryTest {
+
+ @Test
+ public void testSingleClusterDiscovery() throws Exception {
+ final String discoveryAddress = "http://ambarihost:8080";
+ final String clusterName = "testCluster";
+ ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+ ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+ EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+ EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+ EasyMock.replay(sdc);
+
+ ServiceDiscovery.Cluster cluster = sd.discover(sdc, clusterName);
+ assertNotNull(cluster);
+ assertEquals(clusterName, cluster.getName());
+ assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+ assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+// printServiceURLs(cluster);
+ }
+
+
+ @Test
+ public void testBulkClusterDiscovery() throws Exception {
+ final String discoveryAddress = "http://ambarihost:8080";
+ final String clusterName = "anotherCluster";
+ ServiceDiscovery sd = new TestAmbariServiceDiscovery(clusterName);
+
+ ServiceDiscoveryConfig sdc = EasyMock.createNiceMock(ServiceDiscoveryConfig.class);
+ EasyMock.expect(sdc.getAddress()).andReturn(discoveryAddress).anyTimes();
+ EasyMock.expect(sdc.getUser()).andReturn(null).anyTimes();
+ EasyMock.replay(sdc);
+
+ Map<String, ServiceDiscovery.Cluster> clusters = sd.discover(sdc);
+ assertNotNull(clusters);
+ assertEquals(1, clusters.size());
+ ServiceDiscovery.Cluster cluster = clusters.get(clusterName);
+ assertNotNull(cluster);
+ assertEquals(clusterName, cluster.getName());
+ assertTrue(AmbariCluster.class.isAssignableFrom(cluster.getClass()));
+ assertEquals(6, ((AmbariCluster) cluster).getComponents().size());
+
+// printServiceURLs(cluster, "NAMENODE", "WEBHCAT", "OOZIE", "RESOURCEMANAGER");
+ }
+
+
+ private static void printServiceURLs(ServiceDiscovery.Cluster cluster) {
+ final String[] services = new String[]{"NAMENODE",
+ "JOBTRACKER",
+ "WEBHDFS",
+ "WEBHCAT",
+ "OOZIE",
+ "WEBHBASE",
+ "HIVE",
+ "RESOURCEMANAGER"};
+ printServiceURLs(cluster, services);
+ }
+
+
+ private static void printServiceURLs(ServiceDiscovery.Cluster cluster, String...services) {
+ for (String name : services) {
+ StringBuilder sb = new StringBuilder();
+ List<String> urls = cluster.getServiceURLs(name);
+ if (urls != null && !urls.isEmpty()) {
+ for (String url : urls) {
+ sb.append(url);
+ sb.append(" ");
+ }
+ }
+ System.out.println(String.format("%18s: %s", name, sb.toString()));
+ }
+ }
+
+
+ /**
+ * ServiceDiscovery implementation derived from AmbariServiceDiscovery, so the invokeREST method can be overridden
+ * to eliminate the need to perform actual HTTP interactions with a real Ambari endpoint.
+ */
+ private static final class TestAmbariServiceDiscovery extends AmbariServiceDiscovery {
+
+ final static String CLUSTER_PLACEHOLDER = "CLUSTER_NAME";
+
+ private Map<String, JSONObject> cannedResponses = new HashMap<>();
+
+ TestAmbariServiceDiscovery(String clusterName) {
+ cannedResponses.put(AMBARI_CLUSTERS_URI,
+ (JSONObject) JSONValue.parse(CLUSTERS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+ clusterName)));
+
+ cannedResponses.put(String.format(AMBARI_HOSTROLES_URI, clusterName),
+ (JSONObject) JSONValue.parse(HOSTROLES_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+ clusterName)));
+
+ cannedResponses.put(String.format(AMBARI_SERVICECONFIGS_URI, clusterName),
+ (JSONObject) JSONValue.parse(SERVICECONFIGS_JSON_TEMPLATE.replaceAll(CLUSTER_PLACEHOLDER,
+ clusterName)));
+ }
+
+ @Override
+ protected JSONObject invokeREST(String url, String username, String passwordAlias) {
+ return cannedResponses.get(url.substring(url.indexOf("/api")));
+ }
+ }
+
+
+ ////////////////////////////////////////////////////////////////////////
+ // JSON response templates, based on actual response content excerpts
+ ////////////////////////////////////////////////////////////////////////
+
+ private static final String CLUSTERS_JSON_TEMPLATE =
+ "{\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters\",\n" +
+ " \"items\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"Clusters\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"version\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]" +
+ "}";
+
+
+ private static final String HOSTROLES_JSON_TEMPLATE =
+ "{\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services?fields=components/host_components/HostRoles\",\n" +
+ " \"items\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"AMBARI_METRICS\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/AMBARI_METRICS/components/METRICS_COLLECTOR\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+ " \"service_name\" : \"AMBARI_METRICS\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/METRICS_COLLECTOR\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"METRICS_COLLECTOR\",\n" +
+ " \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"service_name\" : \"AMBARI_METRICS\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HBASE/components/HBASE_MASTER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HBASE_MASTER\",\n" +
+ " \"service_name\" : \"HBASE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/HBASE_MASTER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HBASE_MASTER\",\n" +
+ " \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HBASE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"HDFS\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/NAMENODE\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"NAMENODE\",\n" +
+ " \"service_name\" : \"HDFS\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/NAMENODE\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"NAMENODE\",\n" +
+ " \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HDFS\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HDFS/components/SECONDARY_NAMENODE\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+ " \"service_name\" : \"HDFS\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/SECONDARY_NAMENODE\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"SECONDARY_NAMENODE\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HDFS\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"HIVE\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HCAT\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HCAT\",\n" +
+ " \"service_name\" : \"HIVE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/HCAT\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HCAT\",\n" +
+ " \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HIVE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_METASTORE\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HIVE_METASTORE\",\n" +
+ " \"service_name\" : \"HIVE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_METASTORE\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HIVE_METASTORE\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HIVE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/HIVE_SERVER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HIVE_SERVER\",\n" +
+ " \"service_name\" : \"HIVE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/HIVE_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"HIVE_SERVER\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HIVE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/HIVE/components/WEBHCAT_SERVER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+ " \"service_name\" : \"HIVE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/WEBHCAT_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"WEBHCAT_SERVER\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"HIVE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"OOZIE\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/OOZIE/components/OOZIE_SERVER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"OOZIE_SERVER\",\n" +
+ " \"service_name\" : \"OOZIE\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/OOZIE_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"OOZIE_SERVER\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"OOZIE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"YARN\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/APP_TIMELINE_SERVER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+ " \"service_name\" : \"YARN\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/APP_TIMELINE_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"APP_TIMELINE_SERVER\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"YARN\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/NODEMANAGER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"NODEMANAGER\",\n" +
+ " \"service_name\" : \"YARN\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/NODEMANAGER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"NODEMANAGER\",\n" +
+ " \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"service_name\" : \"YARN\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/YARN/components/RESOURCEMANAGER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"RESOURCEMANAGER\",\n" +
+ " \"service_name\" : \"YARN\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/RESOURCEMANAGER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"RESOURCEMANAGER\",\n" +
+ " \"ha_state\" : \"ACTIVE\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"YARN\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER\",\n" +
+ " \"ServiceInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"service_name\" : \"ZOOKEEPER\"\n" +
+ " },\n" +
+ " \"components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/services/ZOOKEEPER/components/ZOOKEEPER_SERVER\",\n" +
+ " \"ServiceComponentInfo\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+ " \"service_name\" : \"ZOOKEEPER\"\n" +
+ " },\n" +
+ " \"host_components\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6401.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+ " \"host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6401.ambari.apache.org\",\n" +
+ " \"service_name\" : \"ZOOKEEPER\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6402.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+ " \"host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6402.ambari.apache.org\",\n" +
+ " \"service_name\" : \"ZOOKEEPER\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/hosts/c6403.ambari.apache.org/host_components/ZOOKEEPER_SERVER\",\n" +
+ " \"HostRoles\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"component_name\" : \"ZOOKEEPER_SERVER\",\n" +
+ " \"host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"public_host_name\" : \"c6403.ambari.apache.org\",\n" +
+ " \"service_name\" : \"ZOOKEEPER\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " }\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ " }\n" +
+ " ]\n" +
+ "}\n";
+
+
+ private static final String SERVICECONFIGS_JSON_TEMPLATE =
+ "{\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?is_current=true\",\n" +
+ " \"items\" : [\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HBASE&service_config_version=1\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hbase-site\",\n" +
+ " \"tag\" : \"version1503410563715\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"hbase.master.info.bindAddress\" : \"0.0.0.0\",\n" +
+ " \"hbase.master.info.port\" : \"16010\",\n" +
+ " \"hbase.master.port\" : \"16000\",\n" +
+ " \"hbase.regionserver.info.port\" : \"16030\",\n" +
+ " \"hbase.regionserver.port\" : \"16020\",\n" +
+ " \"hbase.zookeeper.property.clientPort\" : \"2181\",\n" +
+ " \"hbase.zookeeper.quorum\" : \"c6403.ambari.apache.org,c6402.ambari.apache.org,c6401.ambari.apache.org\",\n" +
+ " \"hbase.zookeeper.useMulti\" : \"true\",\n" +
+ " \"zookeeper.znode.parent\" : \"/hbase-unsecure\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 1,\n" +
+ " \"service_config_version_note\" : \"Initial configurations for HBase\",\n" +
+ " \"service_name\" : \"HBASE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HDFS&service_config_version=2\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hdfs-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"dfs.cluster.administrators\" : \" hdfs\",\n" +
+ " \"dfs.datanode.address\" : \"0.0.0.0:50010\",\n" +
+ " \"dfs.datanode.http.address\" : \"0.0.0.0:50075\",\n" +
+ " \"dfs.datanode.https.address\" : \"0.0.0.0:50475\",\n" +
+ " \"dfs.datanode.ipc.address\" : \"0.0.0.0:8010\",\n" +
+ " \"dfs.http.policy\" : \"HTTP_ONLY\",\n" +
+ " \"dfs.https.port\" : \"50470\",\n" +
+ " \"dfs.journalnode.http-address\" : \"0.0.0.0:8480\",\n" +
+ " \"dfs.journalnode.https-address\" : \"0.0.0.0:8481\",\n" +
+ " \"dfs.namenode.http-address\" : \"c6401.ambari.apache.org:50070\",\n" +
+ " \"dfs.namenode.https-address\" : \"c6401.ambari.apache.org:50470\",\n" +
+ " \"dfs.namenode.rpc-address\" : \"c6401.ambari.apache.org:8020\",\n" +
+ " \"dfs.namenode.secondary.http-address\" : \"c6402.ambari.apache.org:50090\",\n" +
+ " \"dfs.webhdfs.enabled\" : \"true\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : {\n" +
+ " \"final\" : {\n" +
+ " \"dfs.webhdfs.enabled\" : \"true\",\n" +
+ " \"dfs.namenode.http-address\" : \"true\",\n" +
+ " \"dfs.support.append\" : \"true\",\n" +
+ " \"dfs.namenode.name.dir\" : \"true\",\n" +
+ " \"dfs.datanode.failed.volumes.tolerated\" : \"true\",\n" +
+ " \"dfs.datanode.data.dir\" : \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"core-site\",\n" +
+ " \"tag\" : \"version1502131215159\",\n" +
+ " \"version\" : 2,\n" +
+ " \"properties\" : {\n" +
+ " \"hadoop.http.authentication.simple.anonymous.allowed\" : \"true\",\n" +
+ " \"net.topology.script.file.name\" : \"/etc/hadoop/conf/topology_script.py\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : {\n" +
+ " \"final\" : {\n" +
+ " \"fs.defaultFS\" : \"true\"\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 2,\n" +
+ " \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+ " \"service_name\" : \"HDFS\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=HIVE&service_config_version=3\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hive-env\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"hive_security_authorization\" : \"None\",\n" +
+ " \"webhcat_user\" : \"hcat\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hiveserver2-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"hive.metastore.metrics.enabled\" : \"true\",\n" +
+ " \"hive.security.authorization.enabled\" : \"false\",\n" +
+ " \"hive.service.metrics.hadoop2.component\" : \"hiveserver2\",\n" +
+ " \"hive.service.metrics.reporter\" : \"HADOOP2\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hive-interactive-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"hive.server2.enable.doAs\" : \"false\",\n" +
+ " \"hive.server2.tez.default.queues\" : \"default\",\n" +
+ " \"hive.server2.tez.initialize.default.sessions\" : \"true\",\n" +
+ " \"hive.server2.tez.sessions.custom.queue.allowed\" : \"ignore\",\n" +
+ " \"hive.server2.tez.sessions.per.default.queue\" : \"1\",\n" +
+ " \"hive.server2.tez.sessions.restricted.configs\" : \"hive.execution.mode,hive.execution.engine\",\n" +
+ " \"hive.server2.thrift.http.port\" : \"10501\",\n" +
+ " \"hive.server2.thrift.port\" : \"10500\",\n" +
+ " \"hive.server2.webui.port\" : \"10502\",\n" +
+ " \"hive.server2.webui.use.ssl\" : \"false\",\n" +
+ " \"hive.server2.zookeeper.namespace\" : \"hiveserver2-hive2\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"tez-interactive-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"tez.am.am-rm.heartbeat.interval-ms.max\" : \"10000\",\n" +
+ " \"tez.am.client.heartbeat.poll.interval.millis\" : \"6000\",\n" +
+ " \"tez.am.client.heartbeat.timeout.secs\" : \"90\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"hive-site\",\n" +
+ " \"tag\" : \"version1502130841736\",\n" +
+ " \"version\" : 2,\n" +
+ " \"properties\" : {\n" +
+ " \"hive.metastore.sasl.enabled\" : \"false\",\n" +
+ " \"hive.metastore.server.max.threads\" : \"100000\",\n" +
+ " \"hive.metastore.uris\" : \"thrift://c6402.ambari.apache.org:9083\",\n" +
+ " \"hive.server2.allow.user.substitution\" : \"true\",\n" +
+ " \"hive.server2.authentication\" : \"NONE\",\n" +
+ " \"hive.server2.authentication.spnego.keytab\" : \"HTTP/_HOST@EXAMPLE.COM\",\n" +
+ " \"hive.server2.authentication.spnego.principal\" : \"/etc/security/keytabs/spnego.service.keytab\",\n" +
+ " \"hive.server2.enable.doAs\" : \"true\",\n" +
+ " \"hive.server2.support.dynamic.service.discovery\" : \"true\",\n" +
+ " \"hive.server2.thrift.http.path\" : \"cliservice\",\n" +
+ " \"hive.server2.thrift.http.port\" : \"10001\",\n" +
+ " \"hive.server2.thrift.max.worker.threads\" : \"500\",\n" +
+ " \"hive.server2.thrift.port\" : \"10000\",\n" +
+ " \"hive.server2.thrift.sasl.qop\" : \"auth\",\n" +
+ " \"hive.server2.transport.mode\" : \"http\",\n" +
+ " \"hive.server2.use.SSL\" : \"false\",\n" +
+ " \"hive.server2.zookeeper.namespace\" : \"hiveserver2\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : {\n" +
+ " \"hidden\" : {\n" +
+ " \"javax.jdo.option.ConnectionPassword\" : \"HIVE_CLIENT,WEBHCAT_SERVER,HCAT,CONFIG_DOWNLOAD\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"webhcat-site\",\n" +
+ " \"tag\" : \"version1502131111746\",\n" +
+ " \"version\" : 2,\n" +
+ " \"properties\" : {\n" +
+ " \"templeton.port\" : \"50111\",\n" +
+ " \"templeton.zookeeper.hosts\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+ " \"webhcat.proxyuser.knox.groups\" : \"users\",\n" +
+ " \"webhcat.proxyuser.knox.hosts\" : \"*\",\n" +
+ " \"webhcat.proxyuser.root.groups\" : \"*\",\n" +
+ " \"webhcat.proxyuser.root.hosts\" : \"c6401.ambari.apache.org\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"createtime\" : 1502131110745,\n" +
+ " \"group_id\" : -1,\n" +
+ " \"group_name\" : \"Default\",\n" +
+ " \"hosts\" : [ ],\n" +
+ " \"is_cluster_compatible\" : true,\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 3,\n" +
+ " \"service_config_version_note\" : \"knox trusted proxy support\",\n" +
+ " \"service_name\" : \"HIVE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=OOZIE&service_config_version=3\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"oozie-site\",\n" +
+ " \"tag\" : \"version1502131137103\",\n" +
+ " \"version\" : 3,\n" +
+ " \"properties\" : {\n" +
+ " \"oozie.base.url\" : \"http://c6402.ambari.apache.org:11000/oozie\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 3,\n" +
+ " \"service_name\" : \"OOZIE\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=TEZ&service_config_version=1\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"tez-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"tez.use.cluster.hadoop-libs\" : \"false\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"createtime\" : 1502122253525,\n" +
+ " \"group_id\" : -1,\n" +
+ " \"group_name\" : \"Default\",\n" +
+ " \"hosts\" : [ ],\n" +
+ " \"is_cluster_compatible\" : true,\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 1,\n" +
+ " \"service_config_version_note\" : \"Initial configurations for Tez\",\n" +
+ " \"service_name\" : \"TEZ\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " },\n" +
+ " {\n" +
+ " \"href\" : \"http://c6401.ambari.apache.org:8080/api/v1/clusters/"+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"/configurations/service_config_versions?service_name=YARN&service_config_version=1\",\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"configurations\" : [\n" +
+ " {\n" +
+ " \"Config\" : {\n" +
+ " \"cluster_name\" : \""+TestAmbariServiceDiscovery.CLUSTER_PLACEHOLDER+"\",\n" +
+ " \"stack_id\" : \"HDP-2.6\"\n" +
+ " },\n" +
+ " \"type\" : \"yarn-site\",\n" +
+ " \"tag\" : \"version1\",\n" +
+ " \"version\" : 1,\n" +
+ " \"properties\" : {\n" +
+ " \"hadoop.registry.rm.enabled\" : \"true\",\n" +
+ " \"hadoop.registry.zk.quorum\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\",\n" +
+ " \"yarn.acl.enable\" : \"false\",\n" +
+ " \"yarn.http.policy\" : \"HTTP_ONLY\",\n" +
+ " \"yarn.nodemanager.address\" : \"0.0.0.0:45454\",\n" +
+ " \"yarn.nodemanager.bind-host\" : \"0.0.0.0\",\n" +
+ " \"yarn.resourcemanager.address\" : \"c6402.ambari.apache.org:8050\",\n" +
+ " \"yarn.resourcemanager.admin.address\" : \"c6402.ambari.apache.org:8141\",\n" +
+ " \"yarn.resourcemanager.ha.enabled\" : \"false\",\n" +
+ " \"yarn.resourcemanager.hostname\" : \"c6402.ambari.apache.org\",\n" +
+ " \"yarn.resourcemanager.resource-tracker.address\" : \"c6402.ambari.apache.org:8025\",\n" +
+ " \"yarn.resourcemanager.scheduler.address\" : \"c6402.ambari.apache.org:8030\",\n" +
+ " \"yarn.resourcemanager.webapp.address\" : \"c6402.ambari.apache.org:8088\",\n" +
+ " \"yarn.resourcemanager.webapp.delegation-token-auth-filter.enabled\" : \"false\",\n" +
+ " \"yarn.resourcemanager.webapp.https.address\" : \"c6402.ambari.apache.org:8090\",\n" +
+ " \"yarn.resourcemanager.zk-address\" : \"c6403.ambari.apache.org:2181,c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181\"\n" +
+ " },\n" +
+ " \"properties_attributes\" : { }\n" +
+ " }\n" +
+ " ],\n" +
+ " \"is_current\" : true,\n" +
+ " \"service_config_version\" : 1,\n" +
+ " \"service_name\" : \"YARN\",\n" +
+ " \"stack_id\" : \"HDP-2.6\",\n" +
+ " \"user\" : \"admin\"\n" +
+ " }\n" +
+ " ]\n" +
+ "}";
+
+}
[10/23] knox git commit: KNOX-1064 - Externalize Hadoop Service
Configuration Details and Service URL Creation (Phil Zampino via Sandeep
More)
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
new file mode 100644
index 0000000..dd35dbb
--- /dev/null
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariDynamicServiceURLCreatorTest.java
@@ -0,0 +1,876 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.gateway.topology.discovery.ambari;
+
+import org.apache.commons.io.FileUtils;
+import org.easymock.EasyMock;
+import org.junit.Test;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import static junit.framework.TestCase.assertTrue;
+import static junit.framework.TestCase.fail;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+
+public class AmbariDynamicServiceURLCreatorTest {
+
    @Test
    public void testHiveURLFromInternalMapping() throws Exception {
        // HIVE URL creation using the creator's internal (default) mapping configuration.
        testHiveURL(null);
    }

    @Test
    public void testHiveURLFromExternalMapping() throws Exception {
        // HIVE URL creation using the externally-supplied test mapping configuration.
        testHiveURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Exercise HIVE service URL creation against a mocked HIVE_SERVER component for the
     * http, binary, and http-over-SSL transport configurations, validating the URLs
     * produced for each case (one URL per configured host is expected).
     *
     * @param mappingConfiguration The mapping configuration source (a String here), or null
     *                             to use the creator's internal mapping.
     */
    private void testHiveURL(Object mappingConfiguration) throws Exception {

        final String SERVICE_NAME = "HIVE";
        final String[] HOSTNAMES = {"host3", "host2", "host4"};
        final String HTTP_PATH = "cliservice";
        final String HTTP_PORT = "10001";
        final String BINARY_PORT = "10000";

        String expectedScheme = "http";

        final List<String> hiveServerHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent hiveServer = EasyMock.createNiceMock(AmbariComponent.class);

        // The cluster mock always resolves HIVE_SERVER to the component mock above.
        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(hiveServer).anyTimes();
        EasyMock.replay(cluster);

        // Configure HTTP Transport
        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
        EasyMock.replay(hiveServer);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        List<String> urls = builder.create(SERVICE_NAME);
        assertEquals(HOSTNAMES.length, urls.size());
        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);

        // Configure BINARY Transport (the http path property is now empty)
        EasyMock.reset(hiveServer);
        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("false").anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn("").anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.port")).andReturn(BINARY_PORT).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("binary").anyTimes();
        EasyMock.replay(hiveServer);

        // Run the test
        // NOTE(review): binary transport is configured here, yet the URLs are still validated
        // against HTTP_PORT and the "http" scheme rather than BINARY_PORT -- confirm this
        // matches the URL creator's intended handling of the binary transport mode.
        urls = builder.create(SERVICE_NAME);
        assertEquals(HOSTNAMES.length, urls.size());
        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, "");

        // Configure HTTPS Transport
        EasyMock.reset(hiveServer);
        EasyMock.expect(hiveServer.getHostNames()).andReturn(hiveServerHosts).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.use.SSL")).andReturn("true").anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.path")).andReturn(HTTP_PATH).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.thrift.http.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(hiveServer.getConfigProperty("hive.server2.transport.mode")).andReturn("http").anyTimes();
        EasyMock.replay(hiveServer);

        // Run the test
        expectedScheme = "https";
        urls = builder.create(SERVICE_NAME);
        assertEquals(HOSTNAMES.length, urls.size());
        validateServiceURLs(urls, HOSTNAMES, expectedScheme, HTTP_PORT, HTTP_PATH);
    }
+
    @Test
    public void testResourceManagerURLFromInternalMapping() throws Exception {
        testResourceManagerURL(null);
    }

    @Test
    public void testResourceManagerURLFromExternalMapping() throws Exception {
        testResourceManagerURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Exercise RESOURCEMANAGER URL creation for both yarn.http.policy values, verifying that
     * the http vs. https webapp address is selected accordingly.
     *
     * @param mappingConfiguration The mapping configuration source, or null for the internal mapping.
     */
    private void testResourceManagerURL(Object mappingConfiguration) throws Exception {

        final String HTTP_ADDRESS = "host2:1111";
        final String HTTPS_ADDRESS = "host2:22222";

        // HTTP
        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTP");

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        String url = builder.create("RESOURCEMANAGER").get(0);
        assertEquals("http://" + HTTP_ADDRESS + "/ws", url);

        // HTTPS: re-prime the same component mock with the HTTPS_ONLY policy.
        EasyMock.reset(resman);
        setResourceManagerComponentExpectations(resman, HTTP_ADDRESS, HTTPS_ADDRESS, "HTTPS_ONLY");

        // Run the test
        url = builder.create("RESOURCEMANAGER").get(0);
        assertEquals("https://" + HTTPS_ADDRESS + "/ws", url);
    }

    /**
     * Prime (and replay) the RESOURCEMANAGER component mock with the webapp address and
     * http policy configuration properties read during URL creation.
     */
    private void setResourceManagerComponentExpectations(final AmbariComponent resmanMock,
                                                         final String httpAddress,
                                                         final String httpsAddress,
                                                         final String httpPolicy) {
        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.address")).andReturn(httpAddress).anyTimes();
        EasyMock.expect(resmanMock.getConfigProperty("yarn.resourcemanager.webapp.https.address")).andReturn(httpsAddress).anyTimes();
        EasyMock.expect(resmanMock.getConfigProperty("yarn.http.policy")).andReturn(httpPolicy).anyTimes();
        EasyMock.replay(resmanMock);
    }
+
    @Test
    public void testJobTrackerURLFromInternalMapping() throws Exception {
        testJobTrackerURL(null);
    }

    @Test
    public void testJobTrackerURLFromExternalMapping() throws Exception {
        testJobTrackerURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Verify that the JOBTRACKER URL is built as rpc://{yarn.resourcemanager.address}
     * from the RESOURCEMANAGER component configuration.
     */
    private void testJobTrackerURL(Object mappingConfiguration) throws Exception {
        final String ADDRESS = "host2:5678";

        AmbariComponent resman = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(resman.getConfigProperty("yarn.resourcemanager.address")).andReturn(ADDRESS).anyTimes();
        EasyMock.replay(resman);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("RESOURCEMANAGER")).andReturn(resman).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        String url = builder.create("JOBTRACKER").get(0);
        assertEquals("rpc://" + ADDRESS, url);
    }
+
    @Test
    public void testNameNodeURLFromInternalMapping() throws Exception {
        testNameNodeURL(null);
    }

    @Test
    public void testNameNodeURLFromExternalMapping() throws Exception {
        testNameNodeURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Verify that the NAMENODE URL is built as hdfs://{dfs.namenode.rpc-address}
     * from the NAMENODE component configuration.
     */
    private void testNameNodeURL(Object mappingConfiguration) throws Exception {
        final String ADDRESS = "host1:1234";

        AmbariComponent namenode = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(namenode.getConfigProperty("dfs.namenode.rpc-address")).andReturn(ADDRESS).anyTimes();
        EasyMock.replay(namenode);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("NAMENODE")).andReturn(namenode).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        String url = builder.create("NAMENODE").get(0);
        assertEquals("hdfs://" + ADDRESS, url);
    }
+
    @Test
    public void testWebHCatURLFromInternalMapping() throws Exception {
        testWebHCatURL(null);
    }

    @Test
    public void testWebHCatURLFromExternalMapping() throws Exception {
        testWebHCatURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Verify that the WEBHCAT URL is built as http://{host}:{templeton.port}/templeton
     * from the WEBHCAT_SERVER component's host name and configuration.
     */
    private void testWebHCatURL(Object mappingConfiguration) throws Exception {

        final String HOSTNAME = "host3";
        final String PORT = "1919";

        AmbariComponent webhcatServer = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(webhcatServer.getConfigProperty("templeton.port")).andReturn(PORT).anyTimes();
        List<String> webHcatServerHosts = Collections.singletonList(HOSTNAME);
        EasyMock.expect(webhcatServer.getHostNames()).andReturn(webHcatServerHosts).anyTimes();
        EasyMock.replay(webhcatServer);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("WEBHCAT_SERVER")).andReturn(webhcatServer).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        String url = builder.create("WEBHCAT").get(0);
        assertEquals("http://" + HOSTNAME + ":" + PORT + "/templeton", url);
    }
+
    @Test
    public void testOozieURLFromInternalMapping() throws Exception {
        testOozieURL(null);
    }

    @Test
    public void testOozieURLFromExternalMapping() throws Exception {
        testOozieURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Verify that the OOZIE URL is taken verbatim from the OOZIE_SERVER component's
     * oozie.base.url configuration property.
     */
    private void testOozieURL(Object mappingConfiguration) throws Exception {
        final String URL = "http://host3:2222";

        AmbariComponent oozieServer = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(oozieServer.getConfigProperty("oozie.base.url")).andReturn(URL).anyTimes();
        EasyMock.replay(oozieServer);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("OOZIE_SERVER")).andReturn(oozieServer).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        String url = builder.create("OOZIE").get(0);
        assertEquals(URL, url);
    }
+
    @Test
    public void testWebHBaseURLFromInternalMapping() throws Exception {
        testWebHBaseURL(null);
    }

    @Test
    public void testWebHBaseURLFromExternalMapping() throws Exception {
        testWebHBaseURL(TEST_MAPPING_CONFIG);
    }

    /**
     * Verify that one WEBHBASE URL per HBASE_MASTER host is produced, using the
     * fixed port 60080 defined in the mapping configuration.
     */
    private void testWebHBaseURL(Object mappingConfiguration) throws Exception {
        final String[] HOSTNAMES = {"host2", "host4"};

        AmbariComponent hbaseMaster = EasyMock.createNiceMock(AmbariComponent.class);
        List<String> hbaseMasterHosts = Arrays.asList(HOSTNAMES);
        EasyMock.expect(hbaseMaster.getHostNames()).andReturn(hbaseMasterHosts).anyTimes();
        EasyMock.replay(hbaseMaster);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("HBASE_MASTER")).andReturn(hbaseMaster).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, mappingConfiguration);
        List<String> urls = builder.create("WEBHBASE");
        validateServiceURLs(urls, HOSTNAMES, "http", "60080", null);
    }
+
+ @Test
+ public void testWebHdfsURLFromInternalMapping() throws Exception {
+ testWebHdfsURL(null);
+ }
+
+ @Test
+ public void testWebHdfsURLFromExternalMapping() throws Exception {
+ testWebHdfsURL(TEST_MAPPING_CONFIG);
+ }
+
+ @Test
+ public void testWebHdfsURLFromSystemPropertyOverride() throws Exception {
+ // Write the test mapping configuration to a temp file
+ File mappingFile = File.createTempFile("mapping-config", "xml");
+ FileUtils.write(mappingFile, OVERRIDE_MAPPING_FILE_CONTENTS, "utf-8");
+
+ // Set the system property to point to the temp file
+ System.setProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY,
+ mappingFile.getAbsolutePath());
+ try {
+ final String ADDRESS = "host3:1357";
+ // The URL creator should apply the file contents, and create the URL accordingly
+ String url = getTestWebHdfsURL(ADDRESS, null);
+
+ // Verify the URL matches the pattern from the file
+ assertEquals("http://" + ADDRESS + "/webhdfs/OVERRIDE", url);
+ } finally {
+ // Reset the system property, and delete the temp file
+ System.clearProperty(AmbariDynamicServiceURLCreator.MAPPING_CONFIG_OVERRIDE_PROPERTY);
+ mappingFile.delete();
+ }
+ }
+
+ private void testWebHdfsURL(Object mappingConfiguration) throws Exception {
+ final String ADDRESS = "host3:1357";
+ assertEquals("http://" + ADDRESS + "/webhdfs", getTestWebHdfsURL(ADDRESS, mappingConfiguration));
+ }
+
+
+ private String getTestWebHdfsURL(String address, Object mappingConfiguration) throws Exception {
+ AmbariCluster.ServiceConfiguration hdfsSC = EasyMock.createNiceMock(AmbariCluster.ServiceConfiguration.class);
+ Map<String, String> hdfsProps = new HashMap<>();
+ hdfsProps.put("dfs.namenode.http-address", address);
+ EasyMock.expect(hdfsSC.getProperties()).andReturn(hdfsProps).anyTimes();
+ EasyMock.replay(hdfsSC);
+
+ AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
+ EasyMock.expect(cluster.getServiceConfiguration("HDFS", "hdfs-site")).andReturn(hdfsSC).anyTimes();
+ EasyMock.replay(cluster);
+
+ // Create the URL
+ AmbariDynamicServiceURLCreator creator = newURLCreator(cluster, mappingConfiguration);
+ return creator.create("WEBHDFS").get(0);
+ }
+
+
    @Test
    public void testAtlasApiURL() throws Exception {
        // The ATLAS-API URL is taken verbatim from atlas.rest.address.
        final String ATLAS_REST_ADDRESS = "http://host2:21000";

        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(atlasServer.getConfigProperty("atlas.rest.address")).andReturn(ATLAS_REST_ADDRESS).anyTimes();
        EasyMock.replay(atlasServer);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("ATLAS-API");
        assertEquals(1, urls.size());
        assertEquals(ATLAS_REST_ADDRESS, urls.get(0));
    }


    /**
     * Verify ATLAS URL creation for both TLS settings: http with the http port when
     * atlas.enableTLS is false, https with the https port when it is true.
     */
    @Test
    public void testAtlasURL() throws Exception {
        final String HTTP_PORT = "8787";
        final String HTTPS_PORT = "8989";

        final String[] HOSTNAMES = {"host1", "host4"};
        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent atlasServer = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("false").anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(atlasServer);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("ATLAS_SERVER")).andReturn(atlasServer).anyTimes();
        EasyMock.replay(cluster);

        // Run the test (TLS disabled)
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("ATLAS");
        validateServiceURLs(urls, HOSTNAMES, "http", HTTP_PORT, null);

        // Re-prime the mock with TLS enabled.
        EasyMock.reset(atlasServer);
        EasyMock.expect(atlasServer.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.enableTLS")).andReturn("true").anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.http.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(atlasServer.getConfigProperty("atlas.server.https.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(atlasServer);

        // Run the test (TLS enabled)
        urls = builder.create("ATLAS");
        validateServiceURLs(urls, HOSTNAMES, "https", HTTPS_PORT, null);
    }
+
+
    /**
     * Verify ZEPPELIN URL creation for both zeppelin.ssl settings: http with
     * zeppelin.server.port when SSL is off, https with zeppelin.server.ssl.port when on.
     */
    @Test
    public void testZeppelinURL() throws Exception {
        final String HTTP_PORT = "8787";
        final String HTTPS_PORT = "8989";

        final String[] HOSTNAMES = {"host1", "host4"};
        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
        EasyMock.replay(cluster);

        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);

        // Run the test (SSL off)
        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "http", HTTP_PORT, null);

        EasyMock.reset(zeppelinMaster);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        // Run the test (SSL on)
        validateServiceURLs(builder.create("ZEPPELIN"), HOSTNAMES, "https", HTTPS_PORT, null);
    }


    /**
     * Same as testZeppelinURL, but for the ZEPPELINUI service name.
     */
    @Test
    public void testZeppelinUiURL() throws Exception {
        final String HTTP_PORT = "8787";
        final String HTTPS_PORT = "8989";

        final String[] HOSTNAMES = {"host1", "host4"};
        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
        EasyMock.replay(cluster);

        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);

        // Run the test (SSL off)
        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "http", HTTP_PORT, null);

        EasyMock.reset(zeppelinMaster);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        // Run the test (SSL on)
        validateServiceURLs(builder.create("ZEPPELINUI"), HOSTNAMES, "https", HTTPS_PORT, null);
    }


    /**
     * Same configuration as the other Zeppelin tests, but the ZEPPELINWS service is
     * expected to yield ws:// (SSL off) or wss:// (SSL on) URLs.
     */
    @Test
    public void testZeppelinWsURL() throws Exception {
        final String HTTP_PORT = "8787";
        final String HTTPS_PORT = "8989";

        final String[] HOSTNAMES = {"host1", "host4"};
        final List<String> atlastServerHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent zeppelinMaster = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("false").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("ZEPPELIN_MASTER")).andReturn(zeppelinMaster).anyTimes();
        EasyMock.replay(cluster);

        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);

        // Run the test (SSL off)
        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "ws", HTTP_PORT, null);

        EasyMock.reset(zeppelinMaster);
        EasyMock.expect(zeppelinMaster.getHostNames()).andReturn(atlastServerHosts).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.ssl")).andReturn("true").anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.port")).andReturn(HTTP_PORT).anyTimes();
        EasyMock.expect(zeppelinMaster.getConfigProperty("zeppelin.server.ssl.port")).andReturn(HTTPS_PORT).anyTimes();
        EasyMock.replay(zeppelinMaster);

        // Run the test (SSL on)
        validateServiceURLs(builder.create("ZEPPELINWS"), HOSTNAMES, "wss", HTTPS_PORT, null);
    }
+
+
    /**
     * Verify one http URL per DRUID_COORDINATOR host, using the component's druid.port.
     */
    @Test
    public void testDruidCoordinatorURL() throws Exception {
        final String PORT = "8787";

        final String[] HOSTNAMES = {"host3", "host2"};
        final List<String> druidCoordinatorHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent druidCoordinator = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(druidCoordinator.getHostNames()).andReturn(druidCoordinatorHosts).anyTimes();
        EasyMock.expect(druidCoordinator.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
        EasyMock.replay(druidCoordinator);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_COORDINATOR")).andReturn(druidCoordinator).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("DRUID-COORDINATOR");
        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
    }


    /**
     * Verify one http URL per DRUID_BROKER host, using the component's druid.port.
     */
    @Test
    public void testDruidBrokerURL() throws Exception {
        final String PORT = "8181";

        final String[] HOSTNAMES = {"host4", "host3"};
        final List<String> druidHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent druidBroker = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(druidBroker.getHostNames()).andReturn(druidHosts).anyTimes();
        EasyMock.expect(druidBroker.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
        EasyMock.replay(druidBroker);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(druidBroker).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("DRUID-BROKER");
        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
    }


    /**
     * Verify one http URL per DRUID_ROUTER host, using the component's druid.port.
     */
    @Test
    public void testDruidRouterURL() throws Exception {
        final String PORT = "8282";

        final String[] HOSTNAMES = {"host5", "host7"};
        final List<String> druidHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent druidRouter = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(druidRouter.getHostNames()).andReturn(druidHosts).anyTimes();
        EasyMock.expect(druidRouter.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
        EasyMock.replay(druidRouter);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_ROUTER")).andReturn(druidRouter).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("DRUID-ROUTER");
        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
    }


    /**
     * Verify one http URL per DRUID_OVERLORD host, using the component's druid.port.
     */
    @Test
    public void testDruidOverlordURL() throws Exception {
        final String PORT = "8383";

        final String[] HOSTNAMES = {"host4", "host1"};
        final List<String> druidHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent druidOverlord = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(druidOverlord.getHostNames()).andReturn(druidHosts).anyTimes();
        EasyMock.expect(druidOverlord.getConfigProperty("druid.port")).andReturn(PORT).anyTimes();
        EasyMock.replay(druidOverlord);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_OVERLORD")).andReturn(druidOverlord).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("DRUID-OVERLORD");
        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
    }


    /**
     * Verify one http URL per DRUID_SUPERSET host, using SUPERSET_WEBSERVER_PORT.
     */
    @Test
    public void testDruidSupersetURL() throws Exception {
        final String PORT = "8484";

        final String[] HOSTNAMES = {"host4", "host1"};
        final List<String> druidHosts = Arrays.asList(HOSTNAMES);

        AmbariComponent druidSuperset = EasyMock.createNiceMock(AmbariComponent.class);
        EasyMock.expect(druidSuperset.getHostNames()).andReturn(druidHosts).anyTimes();
        EasyMock.expect(druidSuperset.getConfigProperty("SUPERSET_WEBSERVER_PORT")).andReturn(PORT).anyTimes();
        EasyMock.replay(druidSuperset);

        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_SUPERSET")).andReturn(druidSuperset).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("SUPERSET");
        validateServiceURLs(urls, HOSTNAMES, "http", PORT, null);
    }
+
+
    /**
     * When the cluster has no component for a service, the creator is expected to return
     * the raw URL pattern with its placeholders ({HOST}, {PORT}, {PATH}) unresolved.
     */
    @Test
    public void testMissingServiceComponentURL() throws Exception {
        AmbariCluster cluster = EasyMock.createNiceMock(AmbariCluster.class);
        EasyMock.expect(cluster.getComponent("DRUID_BROKER")).andReturn(null).anyTimes();
        EasyMock.expect(cluster.getComponent("HIVE_SERVER")).andReturn(null).anyTimes();
        EasyMock.replay(cluster);

        // Run the test
        AmbariDynamicServiceURLCreator builder = newURLCreator(cluster, null);
        List<String> urls = builder.create("DRUID-BROKER");
        assertNotNull(urls);
        assertEquals(1, urls.size());
        assertEquals("http://{HOST}:{PORT}", urls.get(0));

        urls = builder.create("HIVE");
        assertNotNull(urls);
        assertEquals(1, urls.size());
        assertEquals("http://{HOST}:{PORT}/{PATH}", urls.get(0));
    }
+
+
+ /**
+ * Convenience method for creating AmbariDynamicServiceURLCreator instances from different mapping configuration
+ * input sources.
+ *
+ * @param cluster The Ambari ServiceDiscovery Cluster model
+ * @param mappingConfig The mapping configuration, or null if the internal config should be used.
+ *
+ * @return An AmbariDynamicServiceURLCreator instance, capable of creating service URLs based on the specified
+ * cluster's configuration details.
+ */
+ private static AmbariDynamicServiceURLCreator newURLCreator(AmbariCluster cluster, Object mappingConfig) throws Exception {
+ AmbariDynamicServiceURLCreator result = null;
+
+ if (mappingConfig == null) {
+ result = new AmbariDynamicServiceURLCreator(cluster);
+ } else {
+ if (mappingConfig instanceof String) {
+ result = new AmbariDynamicServiceURLCreator(cluster, (String) mappingConfig);
+ } else if (mappingConfig instanceof File) {
+ result = new AmbariDynamicServiceURLCreator(cluster, (File) mappingConfig);
+ }
+ }
+
+ return result;
+ }
+
+
    /**
     * Validate the specified service URLs: each must be a syntactically valid URI with the
     * expected scheme, port and (optionally) path, and collectively the URLs must cover
     * every expected host exactly once.
     *
     * @param urlsToValidate The URLs to validate
     * @param hostNames      The host names expected in the test URLs
     * @param scheme         The expected scheme for the URLs
     * @param port           The expected port for the URLs
     * @param path           The expected path for the URLs, or null to skip path validation
     */
    // NOTE(review): the declared MalformedURLException is never thrown here (validation
    // uses URI/URISyntaxException, which is caught) -- the throws clause looks vestigial.
    private static void validateServiceURLs(List<String> urlsToValidate,
                                            String[] hostNames,
                                            String scheme,
                                            String port,
                                            String path) throws MalformedURLException {

        // Mutable copy: hosts are removed as they are matched, so duplicates are detected.
        List<String> hostNamesToTest = new LinkedList<>(Arrays.asList(hostNames));
        for (String url : urlsToValidate) {
            URI test = null;
            try {
                // Make sure it's a valid URL
                test = new URI(url);
            } catch (URISyntaxException e) {
                fail(e.getMessage());
            }

            // Validate the scheme
            assertEquals(scheme, test.getScheme());

            // Validate the port (URI.getPort() is -1 when the URL carries no port)
            assertEquals(port, String.valueOf(test.getPort()));

            // If the expected path is not specified, don't validate it
            if (path != null) {
                assertEquals("/" + path, test.getPath());
            }

            // Validate the host name
            assertTrue(hostNamesToTest.contains(test.getHost()));
            hostNamesToTest.remove(test.getHost());
        }
        // Every expected host must have been consumed by exactly one URL.
        assertTrue(hostNamesToTest.isEmpty());
    }
+
+
    // XML mapping configuration used by the *FromExternalMapping tests; it defines the
    // url-pattern and property resolution rules for the services exercised above.
    private static final String TEST_MAPPING_CONFIG =
        "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
        "<service-discovery-url-mappings>\n" +
        " <service name=\"NAMENODE\">\n" +
        " <url-pattern>hdfs://{DFS_NAMENODE_RPC_ADDRESS}</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"DFS_NAMENODE_RPC_ADDRESS\">\n" +
        " <component>NAMENODE</component>\n" +
        " <config-property>dfs.namenode.rpc-address</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "\n" +
        " <service name=\"JOBTRACKER\">\n" +
        " <url-pattern>rpc://{YARN_RM_ADDRESS}</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"YARN_RM_ADDRESS\">\n" +
        " <component>RESOURCEMANAGER</component>\n" +
        " <config-property>yarn.resourcemanager.address</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "\n" +
        " <service name=\"WEBHDFS\">\n" +
        " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"WEBHDFS_ADDRESS\">\n" +
        " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
        " <config-property>dfs.namenode.http-address</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "\n" +
        " <service name=\"WEBHCAT\">\n" +
        " <url-pattern>http://{HOST}:{PORT}/templeton</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"HOST\">\n" +
        " <component>WEBHCAT_SERVER</component>\n" +
        " <hostname/>\n" +
        " </property>\n" +
        " <property name=\"PORT\">\n" +
        " <component>WEBHCAT_SERVER</component>\n" +
        " <config-property>templeton.port</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "\n" +
        " <service name=\"OOZIE\">\n" +
        " <url-pattern>{OOZIE_ADDRESS}</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"OOZIE_ADDRESS\">\n" +
        " <component>OOZIE_SERVER</component>\n" +
        " <config-property>oozie.base.url</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "\n" +
        " <service name=\"WEBHBASE\">\n" +
        " <url-pattern>http://{HOST}:60080</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"HOST\">\n" +
        " <component>HBASE_MASTER</component>\n" +
        " <hostname/>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        " <service name=\"RESOURCEMANAGER\">\n" +
        " <url-pattern>{SCHEME}://{WEBAPP_ADDRESS}/ws</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"WEBAPP_HTTP_ADDRESS\">\n" +
        " <component>RESOURCEMANAGER</component>\n" +
        " <config-property>yarn.resourcemanager.webapp.address</config-property>\n" +
        " </property>\n" +
        " <property name=\"WEBAPP_HTTPS_ADDRESS\">\n" +
        " <component>RESOURCEMANAGER</component>\n" +
        " <config-property>yarn.resourcemanager.webapp.https.address</config-property>\n" +
        " </property>\n" +
        " <property name=\"HTTP_POLICY\">\n" +
        " <component>RESOURCEMANAGER</component>\n" +
        " <config-property>yarn.http.policy</config-property>\n" +
        " </property>\n" +
        " <property name=\"SCHEME\">\n" +
        " <config-property>\n" +
        " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
        " <then>https</then>\n" +
        " <else>http</else>\n" +
        " </if>\n" +
        " </config-property>\n" +
        " </property>\n" +
        " <property name=\"WEBAPP_ADDRESS\">\n" +
        " <component>RESOURCEMANAGER</component>\n" +
        " <config-property>\n" +
        " <if property=\"HTTP_POLICY\" value=\"HTTPS_ONLY\">\n" +
        " <then>WEBAPP_HTTPS_ADDRESS</then>\n" +
        " <else>WEBAPP_HTTP_ADDRESS</else>\n" +
        " </if>\n" +
        " </config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        " <service name=\"HIVE\">\n" +
        " <url-pattern>{SCHEME}://{HOST}:{PORT}/{PATH}</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"HOST\">\n" +
        " <component>HIVE_SERVER</component>\n" +
        " <hostname/>\n" +
        " </property>\n" +
        " <property name=\"USE_SSL\">\n" +
        " <component>HIVE_SERVER</component>\n" +
        " <config-property>hive.server2.use.SSL</config-property>\n" +
        " </property>\n" +
        " <property name=\"PATH\">\n" +
        " <component>HIVE_SERVER</component>\n" +
        " <config-property>hive.server2.thrift.http.path</config-property>\n" +
        " </property>\n" +
        " <property name=\"PORT\">\n" +
        " <component>HIVE_SERVER</component>\n" +
        " <config-property>hive.server2.thrift.http.port</config-property>\n" +
        " </property>\n" +
        " <property name=\"SCHEME\">\n" +
        " <config-property>\n" +
        " <if property=\"USE_SSL\" value=\"true\">\n" +
        " <then>https</then>\n" +
        " <else>http</else>\n" +
        " </if>\n" +
        " </config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "</service-discovery-url-mappings>\n";


    // Minimal mapping configuration written to a temp file by
    // testWebHdfsURLFromSystemPropertyOverride; its /webhdfs/OVERRIDE pattern proves the
    // override file was actually consulted.
    private static final String OVERRIDE_MAPPING_FILE_CONTENTS =
        "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n" +
        "<service-discovery-url-mappings>\n" +
        " <service name=\"WEBHDFS\">\n" +
        " <url-pattern>http://{WEBHDFS_ADDRESS}/webhdfs/OVERRIDE</url-pattern>\n" +
        " <properties>\n" +
        " <property name=\"WEBHDFS_ADDRESS\">\n" +
        " <service-config name=\"HDFS\">hdfs-site</service-config>\n" +
        " <config-property>dfs.namenode.http-address</config-property>\n" +
        " </property>\n" +
        " </properties>\n" +
        " </service>\n" +
        "</service-discovery-url-mappings>\n";
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
----------------------------------------------------------------------
diff --git a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
index 1e5e7b2..f7f0553 100644
--- a/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
+++ b/gateway-discovery-ambari/src/test/java/org/apache/hadoop/gateway/topology/discovery/ambari/AmbariServiceDiscoveryTest.java
@@ -27,7 +27,9 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
/**
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
index fb563fa..521b5b4 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandler.java
@@ -16,15 +16,28 @@
*/
package org.apache.hadoop.gateway.topology.simple;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.InputStreamReader;
+import java.io.IOException;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
import org.apache.hadoop.gateway.i18n.messages.MessagesFactory;
import org.apache.hadoop.gateway.services.Service;
import org.apache.hadoop.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscovery;
import org.apache.hadoop.gateway.topology.discovery.ServiceDiscoveryFactory;
-import java.io.*;
-import java.util.*;
-
/**
* Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
@@ -78,19 +91,29 @@ public class SimpleDescriptorHandler {
descServiceURLs = cluster.getServiceURLs(serviceName);
}
- // If there is at least one URL associated with the service, then add it to the map
+ // Validate the discovered service URLs
+ List<String> validURLs = new ArrayList<>();
if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
- serviceURLs.put(serviceName, descServiceURLs);
+ // Validate the URL(s)
+ for (String descServiceURL : descServiceURLs) {
+ if (validateURL(serviceName, descServiceURL)) {
+ validURLs.add(descServiceURL);
+ }
+ }
+ }
+
+ // If there is at least one valid URL associated with the service, then add it to the map
+ if (!validURLs.isEmpty()) {
+ serviceURLs.put(serviceName, validURLs);
} else {
log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
- throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
- ". Topology update aborted!");
}
}
} else {
log.failedToDiscoverClusterServices(desc.getClusterName());
}
+ BufferedWriter fw = null;
topologyDescriptor = null;
File providerConfig = null;
try {
@@ -110,7 +133,7 @@ public class SimpleDescriptorHandler {
topologyFilename = desc.getClusterName();
}
topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
- FileWriter fw = new FileWriter(topologyDescriptor);
+ fw = new BufferedWriter(new FileWriter(topologyDescriptor));
fw.write("<topology>\n");
@@ -123,8 +146,12 @@ public class SimpleDescriptorHandler {
}
policyReader.close();
+ // Sort the service names to write the services alphabetically
+ List<String> serviceNames = new ArrayList<>(serviceURLs.keySet());
+ Collections.sort(serviceNames);
+
// Write the service declarations
- for (String serviceName : serviceURLs.keySet()) {
+ for (String serviceName : serviceNames) {
fw.write(" <service>\n");
fw.write(" <role>" + serviceName + "</role>\n");
for (String url : serviceURLs.get(serviceName)) {
@@ -136,16 +163,37 @@ public class SimpleDescriptorHandler {
fw.write("</topology>\n");
fw.flush();
- fw.close();
} catch (IOException e) {
log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
topologyDescriptor.delete();
+ } finally {
+ if (fw != null) {
+ try {
+ fw.close();
+ } catch (IOException e) {
+ // ignore
+ }
+ }
}
result.put("topology", topologyDescriptor);
return result;
}
+ private static boolean validateURL(String serviceName, String url) {
+ boolean result = false;
+
+ if (url != null && !url.isEmpty()) {
+ try {
+ new URI(url);
+ result = true;
+ } catch (URISyntaxException e) {
+ log.serviceURLValidationFailed(serviceName, url, e);
+ }
+ }
+
+ return result;
+ }
private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
File providerConfig;
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
index cf9aa28..2a2c4c1 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorMessages.java
@@ -29,7 +29,7 @@ public interface SimpleDescriptorMessages {
void failedToDiscoverClusterServices(final String cluster);
@Message(level = MessageLevel.ERROR,
- text = "No URLs were discovered for {0} in the {1} cluster.")
+ text = "No valid URLs were discovered for {0} in the {1} cluster.")
void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
@Message(level = MessageLevel.ERROR,
@@ -37,6 +37,12 @@ public interface SimpleDescriptorMessages {
void failedToResolveProviderConfigRef(final String providerConfigRef);
@Message(level = MessageLevel.ERROR,
+ text = "URL validation failed for {0} URL {1} : {2}")
+ void serviceURLValidationFailed(final String serviceName,
+ final String url,
+ @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+ @Message(level = MessageLevel.ERROR,
text = "Error generating topology {0} from simple descriptor: {1}")
void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
@StackTrace( level = MessageLevel.DEBUG ) Exception e );
http://git-wip-us.apache.org/repos/asf/knox/blob/7b401def/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
index 90c7146..f79ef23 100644
--- a/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/topology/simple/SimpleDescriptorHandlerTest.java
@@ -17,6 +17,23 @@
*/
package org.apache.hadoop.gateway.topology.simple;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathFactory;
+
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
import org.apache.hadoop.gateway.util.XmlUtils;
import org.easymock.EasyMock;
@@ -26,91 +43,89 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathFactory;
-import java.io.*;
-import java.util.*;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
public class SimpleDescriptorHandlerTest {
private static final String TEST_PROVIDER_CONFIG =
- " <gateway>\n" +
- " <provider>\n" +
- " <role>authentication</role>\n" +
- " <name>ShiroProvider</name>\n" +
- " <enabled>true</enabled>\n" +
- " <param>\n" +
- " <!-- \n" +
- " session timeout in minutes, this is really idle timeout,\n" +
- " defaults to 30mins, if the property value is not defined,, \n" +
- " current client authentication would expire if client idles contiuosly for more than this value\n" +
- " -->\n" +
- " <name>sessionTimeout</name>\n" +
- " <value>30</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapRealm</name>\n" +
- " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapContextFactory</name>\n" +
- " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapRealm.contextFactory</name>\n" +
- " <value>$ldapContextFactory</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapRealm.userDnTemplate</name>\n" +
- " <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapRealm.contextFactory.url</name>\n" +
- " <value>ldap://localhost:33389</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
- " <value>simple</value>\n" +
- " </param>\n" +
- " <param>\n" +
- " <name>urls./**</name>\n" +
- " <value>authcBasic</value>\n" +
- " </param>\n" +
- " </provider>\n" +
- "\n" +
- " <provider>\n" +
- " <role>identity-assertion</role>\n" +
- " <name>Default</name>\n" +
- " <enabled>true</enabled>\n" +
- " </provider>\n" +
- "\n" +
- " <!--\n" +
- " Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
- " For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
- " some AWS internal host name. If the client needs to make a subsequent request to the host identified\n" +
- " in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
- "\n" +
- " If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
- " enabled parameter as false.\n" +
- "\n" +
- " The name parameter specifies the external host names in a comma separated list.\n" +
- " The value parameter specifies corresponding internal host names in a comma separated list.\n" +
- "\n" +
- " Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
- " of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the\n" +
- " Hadoop services using localhost. In real clusters, external host names would almost never be localhost.\n" +
- " -->\n" +
- " <provider>\n" +
- " <role>hostmap</role>\n" +
- " <name>static</name>\n" +
- " <enabled>true</enabled>\n" +
- " <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
- " </provider>\n" +
- " </gateway>\n";
+ " <gateway>\n" +
+ " <provider>\n" +
+ " <role>authentication</role>\n" +
+ " <name>ShiroProvider</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " <param>\n" +
+ " <!-- \n" +
+ " session timeout in minutes, this is really idle timeout,\n" +
+ " defaults to 30mins, if the property value is not defined,, \n" +
+ " current client authentication would expire if client idles contiuosly for more than this value\n" +
+ " -->\n" +
+ " <name>sessionTimeout</name>\n" +
+ " <value>30</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm</name>\n" +
+ " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapContextFactory</name>\n" +
+ " <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory</name>\n" +
+ " <value>$ldapContextFactory</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.userDnTemplate</name>\n" +
+ " <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory.url</name>\n" +
+ " <value>ldap://localhost:33389</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>main.ldapRealm.contextFactory.authenticationMechanism</name>\n" +
+ " <value>simple</value>\n" +
+ " </param>\n" +
+ " <param>\n" +
+ " <name>urls./**</name>\n" +
+ " <value>authcBasic</value>\n" +
+ " </param>\n" +
+ " </provider>\n" +
+ "\n" +
+ " <provider>\n" +
+ " <role>identity-assertion</role>\n" +
+ " <name>Default</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " </provider>\n" +
+ "\n" +
+ " <!--\n" +
+ " Defines rules for mapping host names internal to a Hadoop cluster to externally accessible host names.\n" +
+ " For example, a hadoop service running in AWS may return a response that includes URLs containing the\n" +
+ " some AWS internal host name. If the client needs to make a subsequent request to the host identified\n" +
+ " in those URLs they need to be mapped to external host names that the client Knox can use to connect.\n" +
+ "\n" +
+ " If the external hostname and internal host names are same turn of this provider by setting the value of\n" +
+ " enabled parameter as false.\n" +
+ "\n" +
+ " The name parameter specifies the external host names in a comma separated list.\n" +
+ " The value parameter specifies corresponding internal host names in a comma separated list.\n" +
+ "\n" +
+ " Note that when you are using Sandbox, the external hostname needs to be localhost, as seen in out\n" +
+ " of box sandbox.xml. This is because Sandbox uses port mapping to allow clients to connect to the\n" +
+ " Hadoop services using localhost. In real clusters, external host names would almost never be localhost.\n" +
+ " -->\n" +
+ " <provider>\n" +
+ " <role>hostmap</role>\n" +
+ " <name>static</name>\n" +
+ " <enabled>true</enabled>\n" +
+ " <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>\n" +
+ " </provider>\n" +
+ " </gateway>\n";
/**
@@ -134,7 +149,7 @@ public class SimpleDescriptorHandlerTest {
serviceURLs.put("WEBHBASE", null);
serviceURLs.put("HIVE", null);
serviceURLs.put("RESOURCEMANAGER", null);
- serviceURLs.put("AMBARIUI", Arrays.asList("http://c6401.ambari.apache.org:8080"));
+ serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
// Write the externalized provider config to a temp file
File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
@@ -225,14 +240,152 @@ public class SimpleDescriptorHandlerTest {
}
- private File writeProviderConfig(String path, String content) throws IOException {
- File f = new File(path);
+ /**
+ * KNOX-1006
+ *
+ * Verify the behavior of the SimpleDescriptorHandler when service discovery fails to produce a valid URL for
+ * a service.
+ *
+ * N.B. This test depends on the PropertiesFileServiceDiscovery extension being configured:
+ * org.apache.hadoop.gateway.topology.discovery.test.extension.PropertiesFileServiceDiscovery
+ */
+ @Test
+ public void testInvalidServiceURLFromDiscovery() throws Exception {
+ final String CLUSTER_NAME = "myproperties";
+
+ // Configure the PropertiesFile Service Discovery implementation for this test
+ final String DEFAULT_VALID_SERVICE_URL = "http://localhost:9999/thiswillwork";
+ Properties serviceDiscoverySourceProps = new Properties();
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".NAMENODE",
+ DEFAULT_VALID_SERVICE_URL.replace("http", "hdfs"));
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".JOBTRACKER",
+ DEFAULT_VALID_SERVICE_URL.replace("http", "rpc"));
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHDFS", DEFAULT_VALID_SERVICE_URL);
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHCAT", DEFAULT_VALID_SERVICE_URL);
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".OOZIE", DEFAULT_VALID_SERVICE_URL);
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".WEBHBASE", DEFAULT_VALID_SERVICE_URL);
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".HIVE", "{SCHEME}://localhost:10000/");
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".RESOURCEMANAGER", DEFAULT_VALID_SERVICE_URL);
+ serviceDiscoverySourceProps.setProperty(CLUSTER_NAME + ".AMBARIUI", DEFAULT_VALID_SERVICE_URL);
+ File serviceDiscoverySource = File.createTempFile("service-discovery", ".properties");
+ serviceDiscoverySourceProps.store(new FileOutputStream(serviceDiscoverySource),
+ "Test Service Discovery Source");
+
+ // Prepare a mock SimpleDescriptor
+ final String type = "PROPERTIES_FILE";
+ final String address = serviceDiscoverySource.getAbsolutePath();
+ final Map<String, List<String>> serviceURLs = new HashMap<>();
+ serviceURLs.put("NAMENODE", null);
+ serviceURLs.put("JOBTRACKER", null);
+ serviceURLs.put("WEBHDFS", null);
+ serviceURLs.put("WEBHCAT", null);
+ serviceURLs.put("OOZIE", null);
+ serviceURLs.put("WEBHBASE", null);
+ serviceURLs.put("HIVE", null);
+ serviceURLs.put("RESOURCEMANAGER", null);
+ serviceURLs.put("AMBARIUI", Collections.singletonList("http://c6401.ambari.apache.org:8080"));
- Writer fw = new FileWriter(f);
- fw.write(content);
- fw.flush();
- fw.close();
+ // Write the externalized provider config to a temp file
+ File providerConfig = writeProviderConfig("ambari-cluster-policy.xml", TEST_PROVIDER_CONFIG);
+
+ File topologyFile = null;
+ try {
+ File destDir = (new File(".")).getCanonicalFile();
+
+ // Mock out the simple descriptor
+ SimpleDescriptor testDescriptor = EasyMock.createNiceMock(SimpleDescriptor.class);
+ EasyMock.expect(testDescriptor.getName()).andReturn("mysimpledescriptor").anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryAddress()).andReturn(address).anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryType()).andReturn(type).anyTimes();
+ EasyMock.expect(testDescriptor.getDiscoveryUser()).andReturn(null).anyTimes();
+ EasyMock.expect(testDescriptor.getProviderConfig()).andReturn(providerConfig.getAbsolutePath()).anyTimes();
+ EasyMock.expect(testDescriptor.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();
+ List<SimpleDescriptor.Service> serviceMocks = new ArrayList<>();
+ for (String serviceName : serviceURLs.keySet()) {
+ SimpleDescriptor.Service svc = EasyMock.createNiceMock(SimpleDescriptor.Service.class);
+ EasyMock.expect(svc.getName()).andReturn(serviceName).anyTimes();
+ EasyMock.expect(svc.getURLs()).andReturn(serviceURLs.get(serviceName)).anyTimes();
+ EasyMock.replay(svc);
+ serviceMocks.add(svc);
+ }
+ EasyMock.expect(testDescriptor.getServices()).andReturn(serviceMocks).anyTimes();
+ EasyMock.replay(testDescriptor);
+
+ // Invoke the simple descriptor handler
+ Map<String, File> files =
+ SimpleDescriptorHandler.handle(testDescriptor,
+ providerConfig.getParentFile(), // simple desc co-located with provider config
+ destDir);
+
+ topologyFile = files.get("topology");
+ // Validate the resulting topology descriptor
+ assertTrue(topologyFile.exists());
+
+ // Validate the topology descriptor's correctness
+ TopologyValidator validator = new TopologyValidator( topologyFile.getAbsolutePath() );
+ if( !validator.validateTopology() ){
+ throw new SAXException( validator.getErrorString() );
+ }
+
+ XPathFactory xPathfactory = XPathFactory.newInstance();
+ XPath xpath = xPathfactory.newXPath();
+
+ // Parse the topology descriptor
+ Document topologyXml = XmlUtils.readXml(topologyFile);
+
+ // Validate the provider configuration
+ Document extProviderConf = XmlUtils.readXml(new ByteArrayInputStream(TEST_PROVIDER_CONFIG.getBytes()));
+ Node gatewayNode = (Node) xpath.compile("/topology/gateway").evaluate(topologyXml, XPathConstants.NODE);
+ assertTrue("Resulting provider config should be identical to the referenced content.",
+ extProviderConf.getDocumentElement().isEqualNode(gatewayNode));
+
+ // Validate the service declarations
+ List<String> topologyServices = new ArrayList<>();
+ Map<String, List<String>> topologyServiceURLs = new HashMap<>();
+ NodeList serviceNodes =
+ (NodeList) xpath.compile("/topology/service").evaluate(topologyXml, XPathConstants.NODESET);
+ for (int serviceNodeIndex=0; serviceNodeIndex < serviceNodes.getLength(); serviceNodeIndex++) {
+ Node serviceNode = serviceNodes.item(serviceNodeIndex);
+ Node roleNode = (Node) xpath.compile("role/text()").evaluate(serviceNode, XPathConstants.NODE);
+ assertNotNull(roleNode);
+ String role = roleNode.getNodeValue();
+ topologyServices.add(role);
+ NodeList urlNodes = (NodeList) xpath.compile("url/text()").evaluate(serviceNode, XPathConstants.NODESET);
+ for(int urlNodeIndex = 0 ; urlNodeIndex < urlNodes.getLength(); urlNodeIndex++) {
+ Node urlNode = urlNodes.item(urlNodeIndex);
+ assertNotNull(urlNode);
+ String url = urlNode.getNodeValue();
+ assertNotNull("Every declared service should have a URL.", url);
+ if (!topologyServiceURLs.containsKey(role)) {
+ topologyServiceURLs.put(role, new ArrayList<String>());
+ }
+ topologyServiceURLs.get(role).add(url);
+ }
+ }
+
+ // There should not be a service element for HIVE, since it had no valid URLs
+ assertEquals("Unexpected number of service declarations.", serviceURLs.size() - 1, topologyServices.size());
+ assertFalse("The HIVE service should have been omitted from the generated topology.", topologyServices.contains("HIVE"));
+
+ assertEquals("Unexpected number of service URLs.", serviceURLs.size() - 1, topologyServiceURLs.size());
+
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail(e.getMessage());
+ } finally {
+ serviceDiscoverySource.delete();
+ providerConfig.delete();
+ if (topologyFile != null) {
+ topologyFile.delete();
+ }
+ }
+ }
+
+
+ private File writeProviderConfig(String path, String content) throws IOException {
+ File f = new File(path);
+ FileUtils.write(f, content);
return f;
}
[18/23] knox git commit: Merge branch 'master' into
KNOX-998-Package_Restructuring
Posted by mo...@apache.org.
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
index 802019b,0000000..077fa05
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
+++ b/gateway-provider-security-jwt/src/main/java/org/apache/knox/gateway/provider/federation/jwt/filter/AbstractJWTFilter.java
@@@ -1,278 -1,0 +1,278 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.provider.federation.jwt.filter;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.security.interfaces.RSAPublicKey;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.servlet.Filter;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.knox.gateway.audit.api.Action;
+import org.apache.knox.gateway.audit.api.ActionOutcome;
+import org.apache.knox.gateway.audit.api.AuditContext;
+import org.apache.knox.gateway.audit.api.AuditService;
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.audit.api.Auditor;
+import org.apache.knox.gateway.audit.api.ResourceType;
+import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.knox.gateway.filter.AbstractGatewayFilter;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.provider.federation.jwt.JWTMessages;
+import org.apache.knox.gateway.security.PrimaryPrincipal;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWTToken;
+
+/**
+ *
+ */
+public abstract class AbstractJWTFilter implements Filter {
+ /**
+ * If specified, this configuration property refers to a value which the issuer of a received
+ * token must match. Otherwise, the default value "KNOXSSO" is used
+ */
+ public static final String JWT_EXPECTED_ISSUER = "jwt.expected.issuer";
+ public static final String JWT_DEFAULT_ISSUER = "KNOXSSO";
+
+ static JWTMessages log = MessagesFactory.get( JWTMessages.class );
+ private static AuditService auditService = AuditServiceFactory.getAuditService();
+ private static Auditor auditor = auditService.getAuditor(
+ AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
+ AuditConstants.KNOX_COMPONENT_NAME );
+
+ protected List<String> audiences;
+ protected JWTokenAuthority authority;
+ protected RSAPublicKey publicKey = null;
+ private String expectedIssuer;
+
+ public abstract void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
+ throws IOException, ServletException;
+
+ /**
+ *
+ */
+ public AbstractJWTFilter() {
+ super();
+ }
+
+ @Override
+ public void init( FilterConfig filterConfig ) throws ServletException {
+ ServletContext context = filterConfig.getServletContext();
+ if (context != null) {
+ GatewayServices services = (GatewayServices) context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE);
+ if (services != null) {
+ authority = (JWTokenAuthority) services.getService(GatewayServices.TOKEN_SERVICE);
+ }
+ }
+ }
+
+ protected void configureExpectedIssuer(FilterConfig filterConfig) {
+ expectedIssuer = filterConfig.getInitParameter(JWT_EXPECTED_ISSUER);;
+ if (expectedIssuer == null) {
+ expectedIssuer = JWT_DEFAULT_ISSUER;
+ }
+ }
+
+ /**
+ * @param expectedAudiences
+ * @return
+ */
+ protected List<String> parseExpectedAudiences(String expectedAudiences) {
+ ArrayList<String> audList = null;
+ // setup the list of valid audiences for token validation
+ if (expectedAudiences != null) {
+ // parse into the list
+ String[] audArray = expectedAudiences.split(",");
+ audList = new ArrayList<String>();
+ for (String a : audArray) {
- audList.add(a);
++ audList.add(a.trim());
+ }
+ }
+ return audList;
+ }
+
+ protected boolean tokenIsStillValid(JWTToken jwtToken) {
+ // if there is no expiration date then the lifecycle is tied entirely to
+ // the cookie validity - otherwise ensure that the current time is before
+ // the designated expiration time
+ Date expires = jwtToken.getExpiresDate();
+ return (expires == null || expires != null && new Date().before(expires));
+ }
+
+ /**
+ * Validate whether any of the accepted audience claims is present in the
+ * issued token claims list for audience. Override this method in subclasses
+ * in order to customize the audience validation behavior.
+ *
+ * @param jwtToken
+ * the JWT token where the allowed audiences will be found
+ * @return true if an expected audience is present, otherwise false
+ */
+ protected boolean validateAudiences(JWTToken jwtToken) {
+ boolean valid = false;
+
+ String[] tokenAudienceList = jwtToken.getAudienceClaims();
+ // if there were no expected audiences configured then just
+ // consider any audience acceptable
+ if (audiences == null) {
+ valid = true;
+ } else {
+ // if any of the configured audiences is found then consider it
+ // acceptable
+ if (tokenAudienceList != null) {
+ for (String aud : tokenAudienceList) {
+ if (audiences.contains(aud)) {
+ log.jwtAudienceValidated();
+ valid = true;
+ break;
+ }
+ }
+ }
+ }
+ return valid;
+ }
+
+ protected void continueWithEstablishedSecurityContext(Subject subject, final HttpServletRequest request, final HttpServletResponse response, final FilterChain chain) throws IOException, ServletException {
+ Principal principal = (Principal) subject.getPrincipals(PrimaryPrincipal.class).toArray()[0];
+ AuditContext context = auditService.getContext();
+ if (context != null) {
+ context.setUsername( principal.getName() );
+ String sourceUri = (String)request.getAttribute( AbstractGatewayFilter.SOURCE_REQUEST_CONTEXT_URL_ATTRIBUTE_NAME );
+ if (sourceUri != null) {
+ auditor.audit( Action.AUTHENTICATION , sourceUri, ResourceType.URI, ActionOutcome.SUCCESS );
+ }
+ }
+
+ try {
+ Subject.doAs(
+ subject,
+ new PrivilegedExceptionAction<Object>() {
+ @Override
+ public Object run() throws Exception {
+ chain.doFilter(request, response);
+ return null;
+ }
+ }
+ );
+ }
+ catch (PrivilegedActionException e) {
+ Throwable t = e.getCause();
+ if (t instanceof IOException) {
+ throw (IOException) t;
+ }
+ else if (t instanceof ServletException) {
+ throw (ServletException) t;
+ }
+ else {
+ throw new ServletException(t);
+ }
+ }
+ }
+
+ protected Subject createSubjectFromToken(JWTToken token) {
+ final String principal = token.getSubject();
+
+ @SuppressWarnings("rawtypes")
+ HashSet emptySet = new HashSet();
+ Set<Principal> principals = new HashSet<>();
+ Principal p = new PrimaryPrincipal(principal);
+ principals.add(p);
+
+ // The newly constructed Sets check whether this Subject has been set read-only
+ // before permitting subsequent modifications. The newly created Sets also prevent
+ // illegal modifications by ensuring that callers have sufficient permissions.
+ //
+ // To modify the Principals Set, the caller must have AuthPermission("modifyPrincipals").
+ // To modify the public credential Set, the caller must have AuthPermission("modifyPublicCredentials").
+ // To modify the private credential Set, the caller must have AuthPermission("modifyPrivateCredentials").
+ javax.security.auth.Subject subject = new javax.security.auth.Subject(true, principals, emptySet, emptySet);
+ return subject;
+ }
+
+ protected boolean validateToken(HttpServletRequest request, HttpServletResponse response,
+ FilterChain chain, JWTToken token)
+ throws IOException, ServletException {
+ boolean verified = false;
+ try {
+ if (publicKey == null) {
+ verified = authority.verifyToken(token);
+ }
+ else {
+ verified = authority.verifyToken(token, publicKey);
+ }
+ } catch (TokenServiceException e) {
+ log.unableToVerifyToken(e);
+ }
+
+ if (verified) {
+ // confirm that issue matches intended target
+ if (expectedIssuer.equals(token.getIssuer())) {
+ // if there is no expiration data then the lifecycle is tied entirely to
+ // the cookie validity - otherwise ensure that the current time is before
+ // the designated expiration time
+ if (tokenIsStillValid(token)) {
+ boolean audValid = validateAudiences(token);
+ if (audValid) {
+ return true;
+ }
+ else {
+ log.failedToValidateAudience();
+ handleValidationError(request, response, HttpServletResponse.SC_BAD_REQUEST,
+ "Bad request: missing required token audience");
+ }
+ }
+ else {
+ log.tokenHasExpired();
+ handleValidationError(request, response, HttpServletResponse.SC_BAD_REQUEST,
+ "Bad request: token has expired");
+ }
+ }
+ else {
+ handleValidationError(request, response, HttpServletResponse.SC_UNAUTHORIZED, null);
+ }
+ }
+ else {
+ log.failedToVerifyTokenSignature();
+ handleValidationError(request, response, HttpServletResponse.SC_UNAUTHORIZED, null);
+ }
+
+ return false;
+ }
+
+ protected abstract void handleValidationError(HttpServletRequest request, HttpServletResponse response, int status,
+ String error) throws IOException;
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
index 361a1ff,0000000..9888eab
mode 100644,000000..100644
--- a/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
+++ b/gateway-provider-security-jwt/src/test/java/org/apache/knox/gateway/provider/federation/AbstractJWTFilterTest.java
@@@ -1,636 -1,0 +1,667 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.provider.federation;
+
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.security.AccessController;
+import java.security.KeyPair;
+import java.security.KeyPairGenerator;
+import java.security.NoSuchAlgorithmException;
+import java.security.Principal;
+import java.security.PublicKey;
+import java.security.cert.Certificate;
+import java.security.interfaces.RSAPrivateKey;
+import java.security.interfaces.RSAPublicKey;
+import java.text.MessageFormat;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Properties;
+import java.util.Date;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.servlet.FilterChain;
+import javax.servlet.FilterConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.knox.gateway.provider.federation.jwt.filter.AbstractJWTFilter;
+import org.apache.knox.gateway.provider.federation.jwt.filter.SSOCookieFederationFilter;
+import org.apache.knox.gateway.security.PrimaryPrincipal;
+import org.apache.knox.gateway.services.security.impl.X509CertificateUtil;
+import org.apache.knox.gateway.services.security.token.JWTokenAuthority;
+import org.apache.knox.gateway.services.security.token.TokenServiceException;
+import org.apache.knox.gateway.services.security.token.impl.JWT;
+import org.apache.knox.gateway.services.security.token.impl.JWTToken;
+import org.easymock.EasyMock;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.nimbusds.jose.*;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.crypto.RSASSAVerifier;
+
+public abstract class AbstractJWTFilterTest {
+ private static final String SERVICE_URL = "https://localhost:8888/resource";
+ private static final String dnTemplate = "CN={0},OU=Test,O=Hadoop,L=Test,ST=Test,C=US";
+
+ protected AbstractJWTFilter handler = null;
+ protected static RSAPublicKey publicKey = null;
+ protected static RSAPrivateKey privateKey = null;
+ protected static String pem = null;
+
+ protected abstract void setTokenOnRequest(HttpServletRequest request, SignedJWT jwt);
+ protected abstract void setGarbledTokenOnRequest(HttpServletRequest request, SignedJWT jwt);
+ protected abstract String getAudienceProperty();
+ protected abstract String getVerificationPemProperty();
+
+ private static String buildDistinguishedName(String hostname) {
+ MessageFormat headerFormatter = new MessageFormat(dnTemplate);
+ String[] paramArray = new String[1];
+ paramArray[0] = hostname;
+ String dn = headerFormatter.format(paramArray);
+ return dn;
+ }
+
+ @BeforeClass
+ public static void generateKeys() throws Exception, NoSuchAlgorithmException {
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+ kpg.initialize(2048);
+ KeyPair KPair = kpg.generateKeyPair();
+ String dn = buildDistinguishedName(InetAddress.getLocalHost().getHostName());
+ Certificate cert = X509CertificateUtil.generateCertificate(dn, KPair, 365, "SHA1withRSA");
+ byte[] data = cert.getEncoded();
+ Base64 encoder = new Base64( 76, "\n".getBytes( "ASCII" ) );
+ pem = new String(encoder.encodeToString( data ).getBytes( "ASCII" )).trim();
+
+ publicKey = (RSAPublicKey) KPair.getPublic();
+ privateKey = (RSAPrivateKey) KPair.getPrivate();
+ }
+
+ @After
+ public void teardown() throws Exception {
+ handler.destroy();
+ }
+
+ @Test
+ public void testValidJWT() throws Exception {
+ try {
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testValidAudienceJWT() throws Exception {
+ try {
+ Properties props = getProperties();
+ props.put(getAudienceProperty(), "bar");
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testInvalidAudienceJWT() throws Exception {
+ try {
+ Properties props = getProperties();
+ props.put(getAudienceProperty(), "foo");
+ props.put("sso.authentication.provider.url", "https://localhost:8443/gateway/knoxsso/api/v1/websso");
+
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
++ public void testValidAudienceJWTWhitespace() throws Exception {
++ try {
++ Properties props = getProperties();
++ props.put(getAudienceProperty(), " foo, bar ");
++ handler.init(new TestFilterConfig(props));
++
++ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 5000), privateKey, props);
++
++ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
++ setTokenOnRequest(request, jwt);
++
++ EasyMock.expect(request.getRequestURL()).andReturn(
++ new StringBuffer(SERVICE_URL)).anyTimes();
++ EasyMock.expect(request.getQueryString()).andReturn(null);
++ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
++ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
++ SERVICE_URL);
++ EasyMock.replay(request);
++
++ TestFilterChain chain = new TestFilterChain();
++ handler.doFilter(request, response, chain);
++ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
++ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
++ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
++ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
++ } catch (ServletException se) {
++ fail("Should NOT have thrown a ServletException.");
++ }
++ }
++
++ @Test
+ public void testValidVerificationPEM() throws Exception {
+ try {
+ Properties props = getProperties();
+
+// System.out.println("+" + pem + "+");
+
+ props.put(getAudienceProperty(), "bar");
+ props.put("sso.authentication.provider.url", "https://localhost:8443/gateway/knoxsso/api/v1/websso");
+ props.put(getVerificationPemProperty(), pem);
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testExpiredJWT() throws Exception {
+ try {
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() - 1000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", !chain.doFilterCalled);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testValidJWTNoExpiration() throws Exception {
+ try {
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", null, privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL).anyTimes();
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled );
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", !principals.isEmpty());
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testUnableToParseJWT() throws Exception {
+ try {
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setGarbledTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL).anyTimes();
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testFailedSignatureValidationJWT() throws Exception {
+ try {
+ // Create a private key to sign the token
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+ kpg.initialize(1024);
+
+ KeyPair kp = kpg.genKeyPair();
+
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("bob", new Date(new Date().getTime() + 5000),
+ (RSAPrivateKey)kp.getPrivate(), props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL).anyTimes();
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testInvalidVerificationPEM() throws Exception {
+ try {
+ Properties props = getProperties();
+
+ KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
+ kpg.initialize(1024);
+
+ KeyPair KPair = kpg.generateKeyPair();
+ String dn = buildDistinguishedName(InetAddress.getLocalHost().getHostName());
+ Certificate cert = X509CertificateUtil.generateCertificate(dn, KPair, 365, "SHA1withRSA");
+ byte[] data = cert.getEncoded();
+ Base64 encoder = new Base64( 76, "\n".getBytes( "ASCII" ) );
+ String failingPem = new String(encoder.encodeToString( data ).getBytes( "ASCII" )).trim();
+
+ props.put(getAudienceProperty(), "bar");
+ props.put(getVerificationPemProperty(), failingPem);
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("alice", new Date(new Date().getTime() + 50000), privateKey, props);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be true.", chain.doFilterCalled == false);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testInvalidIssuer() throws Exception {
+ try {
+ Properties props = getProperties();
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("new-issuer", "alice", new Date(new Date().getTime() + 5000), privateKey);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be true.", !chain.doFilterCalled);
+ Assert.assertTrue("No Subject should be returned.", chain.subject == null);
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ @Test
+ public void testValidIssuerViaConfig() throws Exception {
+ try {
+ Properties props = getProperties();
+ props.setProperty(AbstractJWTFilter.JWT_EXPECTED_ISSUER, "new-issuer");
+ handler.init(new TestFilterConfig(props));
+
+ SignedJWT jwt = getJWT("new-issuer", "alice", new Date(new Date().getTime() + 5000), privateKey);
+
+ HttpServletRequest request = EasyMock.createNiceMock(HttpServletRequest.class);
+ setTokenOnRequest(request, jwt);
+
+ EasyMock.expect(request.getRequestURL()).andReturn(
+ new StringBuffer(SERVICE_URL)).anyTimes();
+ EasyMock.expect(request.getQueryString()).andReturn(null);
+ HttpServletResponse response = EasyMock.createNiceMock(HttpServletResponse.class);
+ EasyMock.expect(response.encodeRedirectURL(SERVICE_URL)).andReturn(
+ SERVICE_URL);
+ EasyMock.replay(request);
+
+ TestFilterChain chain = new TestFilterChain();
+ handler.doFilter(request, response, chain);
+ Assert.assertTrue("doFilterCalled should not be false.", chain.doFilterCalled);
+ Set<PrimaryPrincipal> principals = chain.subject.getPrincipals(PrimaryPrincipal.class);
+ Assert.assertTrue("No PrimaryPrincipal", principals.size() > 0);
+ Assert.assertEquals("Not the expected principal", "alice", ((Principal)principals.toArray()[0]).getName());
+ } catch (ServletException se) {
+ fail("Should NOT have thrown a ServletException.");
+ }
+ }
+
+ protected Properties getProperties() {
+ Properties props = new Properties();
+ props.setProperty(
+ SSOCookieFederationFilter.SSO_AUTHENTICATION_PROVIDER_URL,
+ "https://localhost:8443/authserver");
+ return props;
+ }
+
+ protected SignedJWT getJWT(String sub, Date expires, RSAPrivateKey privateKey,
+ Properties props) throws Exception {
+ return getJWT(AbstractJWTFilter.JWT_DEFAULT_ISSUER, sub, expires, privateKey);
+ }
+
+ protected SignedJWT getJWT(String issuer, String sub, Date expires, RSAPrivateKey privateKey)
+ throws Exception {
+ List<String> aud = new ArrayList<String>();
+ aud.add("bar");
+
+ JWTClaimsSet claims = new JWTClaimsSet.Builder()
+ .issuer(issuer)
+ .subject(sub)
+ .audience(aud)
+ .expirationTime(expires)
+ .claim("scope", "openid")
+ .build();
+
+ JWSHeader header = new JWSHeader.Builder(JWSAlgorithm.RS256).build();
+
+ SignedJWT signedJWT = new SignedJWT(header, claims);
+ JWSSigner signer = new RSASSASigner(privateKey);
+
+ signedJWT.sign(signer);
+
+ return signedJWT;
+ }
+
+ protected static class TestFilterConfig implements FilterConfig {
+ Properties props = null;
+
+ public TestFilterConfig(Properties props) {
+ this.props = props;
+ }
+
+ @Override
+ public String getFilterName() {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getServletContext()
+ */
+ @Override
+ public ServletContext getServletContext() {
+// JWTokenAuthority authority = EasyMock.createNiceMock(JWTokenAuthority.class);
+// GatewayServices services = EasyMock.createNiceMock(GatewayServices.class);
+// EasyMock.expect(services.getService("TokenService").andReturn(authority));
+// ServletContext context = EasyMock.createNiceMock(ServletContext.class);
+// EasyMock.expect(context.getAttribute(GatewayServices.GATEWAY_SERVICES_ATTRIBUTE).andReturn(new DefaultGatewayServices()));
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameter(java.lang.String)
+ */
+ @Override
+ public String getInitParameter(String name) {
+ return props.getProperty(name, null);
+ }
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterConfig#getInitParameterNames()
+ */
+ @Override
+ public Enumeration<String> getInitParameterNames() {
+ return null;
+ }
+
+ }
+
+ protected static class TestJWTokenAuthority implements JWTokenAuthority {
+
+ private PublicKey verifyingKey;
+
+ public TestJWTokenAuthority(PublicKey verifyingKey) {
+ this.verifyingKey = verifyingKey;
+ }
+
+ /* (non-Javadoc)
+ * @see JWTokenAuthority#issueToken(javax.security.auth.Subject, java.lang.String)
+ */
+ @Override
+ public JWT issueToken(Subject subject, String algorithm)
+ throws TokenServiceException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String)
+ */
+ @Override
+ public JWT issueToken(Principal p, String algorithm)
+ throws TokenServiceException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String)
+ */
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm)
+ throws TokenServiceException {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.knox.gateway.services.security.token.JWTokenAuthority#verifyToken(org.apache.knox.gateway.services.security.token.impl.JWT)
+ */
+ @Override
+ public boolean verifyToken(JWT token) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier((RSAPublicKey) verifyingKey);
+ return token.verify(verifier);
+ }
+
+ /* (non-Javadoc)
+ * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, java.lang.String, long)
+ */
+ @Override
+ public JWT issueToken(Principal p, String audience, String algorithm,
+ long expires) throws TokenServiceException {
+ return null;
+ }
+
+ @Override
+ public JWT issueToken(Principal p, List<String> audiences, String algorithm,
+ long expires) throws TokenServiceException {
+ return null;
+ }
+
+ /* (non-Javadoc)
+ * @see JWTokenAuthority#issueToken(java.security.Principal, java.lang.String, long)
+ */
+ @Override
+ public JWT issueToken(Principal p, String algorithm, long expires)
+ throws TokenServiceException {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public boolean verifyToken(JWT token, RSAPublicKey publicKey) throws TokenServiceException {
+ JWSVerifier verifier = new RSASSAVerifier(publicKey);
+ return token.verify(verifier);
+ }
+
+ }
+
+ protected static class TestFilterChain implements FilterChain {
+ boolean doFilterCalled = false;
+ Subject subject = null;
+
+ /* (non-Javadoc)
+ * @see javax.servlet.FilterChain#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse)
+ */
+ @Override
+ public void doFilter(ServletRequest request, ServletResponse response)
+ throws IOException, ServletException {
+ doFilterCalled = true;
+
+ subject = Subject.getSubject( AccessController.getContext() );
+ }
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
index 86f2854,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
index 5b3b6e0,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
index d13bdaa,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
index b062013,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
index e3811b4,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
+++ b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
----------------------------------------------------------------------
diff --cc gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
index a0cd7be,0000000..e69de29
mode 100644,000000..100644
--- a/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
+++ b/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-release/pom.xml
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/services/registry/impl/DefaultServiceRegistryService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/registry/impl/DefaultServiceRegistryService.java
index 84330c7,0000000..075eda1
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/registry/impl/DefaultServiceRegistryService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/registry/impl/DefaultServiceRegistryService.java
@@@ -1,207 -1,0 +1,207 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.registry.impl;
+
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParseException;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.FileUtils;
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.registry.ServiceRegistry;
+import org.apache.knox.gateway.services.security.CryptoService;
+
+import java.io.File;
+import java.io.IOException;
++import java.security.SecureRandom;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
- import java.util.Random;
+
+public class DefaultServiceRegistryService implements ServiceRegistry, Service {
+ private static GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
-
++
+ protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
+ 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
+ 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
+ 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
+ '2', '3', '4', '5', '6', '7', '8', '9',};
+
+ private CryptoService crypto;
+ private Registry registry = new Registry();
+
+ private String registryFileName;
-
++
+ public DefaultServiceRegistryService() {
+ }
-
++
+ public void setCryptoService(CryptoService crypto) {
+ this.crypto = crypto;
+ }
-
++
+ public String getRegistrationCode(String clusterName) {
+ String code = generateRegCode(16);
+ byte[] signature = crypto.sign("SHA256withRSA","gateway-identity",code);
+ String encodedSig = Base64.encodeBase64URLSafeString(signature);
-
++
+ return code + "::" + encodedSig;
+ }
-
++
+ private String generateRegCode(int length) {
- StringBuffer sb = new StringBuffer();
- Random r = new Random();
++ StringBuilder sb = new StringBuilder();
++ SecureRandom r = new SecureRandom();
+ for (int i = 0; i < length; i++) {
+ sb.append(chars[r.nextInt(chars.length)]);
+ }
+ return sb.toString();
+ }
-
++
+ public void removeClusterServices(String clusterName) {
+ registry.remove(clusterName);
+ }
+
+ public boolean registerService(String regCode, String clusterName, String serviceName, List<String> urls) {
+ boolean rc = false;
+ // verify the signature of the regCode
+ if (regCode == null) {
+ throw new IllegalArgumentException("Registration Code must not be null.");
+ }
+ String[] parts = regCode.split("::");
-
++
+ // part one is the code and part two is the signature
+ boolean verified = crypto.verify("SHA256withRSA", "gateway-identity", parts[0], Base64.decodeBase64(parts[1]));
+ if (verified) {
+ HashMap<String,RegEntry> clusterServices = registry.get(clusterName);
+ if (clusterServices == null) {
+ synchronized(this) {
+ clusterServices = new HashMap<>();
+ registry.put(clusterName, clusterServices);
+ }
+ }
+ RegEntry regEntry = new RegEntry();
+ regEntry.setClusterName(clusterName);
+ regEntry.setServiceName(serviceName);
+ regEntry.setUrls(urls);
+ clusterServices.put(serviceName , regEntry);
+ String json = renderAsJsonString(registry);
+ try {
+ FileUtils.write(new File(registryFileName), json);
+ rc = true;
+ } catch (IOException e) {
+ // log appropriately
+ e.printStackTrace(); //TODO: I18N
+ }
+ }
-
++
+ return rc;
+ }
-
++
+ private String renderAsJsonString(HashMap<String,HashMap<String,RegEntry>> registry) {
+ String json = null;
+ ObjectMapper mapper = new ObjectMapper();
-
++
+ try {
+ // write JSON to a file
+ json = mapper.writeValueAsString((Object)registry);
-
++
+ } catch ( JsonProcessingException e ) {
+ e.printStackTrace(); //TODO: I18N
+ }
+ return json;
+ }
-
++
+ @Override
+ public String lookupServiceURL(String clusterName, String serviceName) {
+ List<String> urls = lookupServiceURLs( clusterName, serviceName );
+ if ( urls != null && !urls.isEmpty() ) {
+ return urls.get( 0 );
+ }
+ return null;
+ }
+
+ @Override
+ public List<String> lookupServiceURLs( String clusterName, String serviceName ) {
+ RegEntry entry = null;
- HashMap clusterServices = registry.get(clusterName);
++ HashMap<String, RegEntry> clusterServices = registry.get(clusterName);
+ if (clusterServices != null) {
- entry = (RegEntry) clusterServices.get(serviceName);
++ entry = clusterServices.get(serviceName);
+ if( entry != null ) {
+ return entry.getUrls();
+ }
+ }
+ return null;
+ }
-
++
+ private HashMap<String, HashMap<String,RegEntry>> getMapFromJsonString(String json) {
+ Registry map = null;
- JsonFactory factory = new JsonFactory();
- ObjectMapper mapper = new ObjectMapper(factory);
- TypeReference<Registry> typeRef
- = new TypeReference<Registry>() {};
++ JsonFactory factory = new JsonFactory();
++ ObjectMapper mapper = new ObjectMapper(factory);
++ TypeReference<Registry> typeRef
++ = new TypeReference<Registry>() {};
+ try {
+ map = mapper.readValue(json, typeRef);
+ } catch (JsonParseException e) {
+ LOG.failedToGetMapFromJsonString( json, e );
+ } catch (JsonMappingException e) {
+ LOG.failedToGetMapFromJsonString( json, e );
+ } catch (IOException e) {
+ LOG.failedToGetMapFromJsonString( json, e );
- }
++ }
+ return map;
- }
++ }
+
+ @Override
+ public void init(GatewayConfig config, Map<String, String> options)
+ throws ServiceLifecycleException {
+ String securityDir = config.getGatewaySecurityDir();
+ String filename = "registry";
+ setupRegistryFile(securityDir, filename);
+ }
+
+ protected void setupRegistryFile(String securityDir, String filename) throws ServiceLifecycleException {
+ File registryFile = new File(securityDir, filename);
+ if (registryFile.exists()) {
+ try {
+ String json = FileUtils.readFileToString(registryFile);
+ Registry reg = (Registry) getMapFromJsonString(json);
+ if (reg != null) {
+ registry = reg;
+ }
+ } catch (Exception e) {
+ throw new ServiceLifecycleException("Unable to load the persisted registry.", e);
+ }
+ }
+ registryFileName = registryFile.getAbsolutePath();
+ }
+
+ @Override
+ public void start() throws ServiceLifecycleException {
+ }
+
+ @Override
+ public void stop() throws ServiceLifecycleException {
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/DefaultAliasService.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/DefaultAliasService.java
index f52a7b3,0000000..b5e62ab
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/DefaultAliasService.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/services/security/impl/DefaultAliasService.java
@@@ -1,217 -1,0 +1,217 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.services.security.impl;
+
+import java.security.KeyStore;
+import java.security.KeyStoreException;
++import java.security.SecureRandom;
+import java.security.cert.Certificate;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Map;
- import java.util.Random;
+
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.ServiceLifecycleException;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.AliasServiceException;
+import org.apache.knox.gateway.services.security.KeystoreService;
+import org.apache.knox.gateway.services.security.KeystoreServiceException;
+import org.apache.knox.gateway.services.security.MasterService;
+
+public class DefaultAliasService implements AliasService {
+ private static final GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
+
- private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
++ private static final String GATEWAY_IDENTITY_PASSPHRASE = "gateway-identity-passphrase";
+
+ protected char[] chars = { 'a', 'b', 'c', 'd', 'e', 'f', 'g',
+ 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',
+ 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K',
+ 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
+ '2', '3', '4', '5', '6', '7', '8', '9',};
+
+ private KeystoreService keystoreService;
+ private MasterService masterService;
+
+ @Override
+ public void init(GatewayConfig config, Map<String, String> options)
+ throws ServiceLifecycleException {
+ }
+
+ @Override
+ public void start() throws ServiceLifecycleException {
+ }
+
+ @Override
+ public void stop() throws ServiceLifecycleException {
+ }
+
+ @Override
+ public char[] getGatewayIdentityPassphrase() throws AliasServiceException {
+ char[] passphrase = getPasswordFromAliasForGateway(GATEWAY_IDENTITY_PASSPHRASE);
+ if (passphrase == null) {
+ passphrase = masterService.getMasterSecret();
+ }
+ return passphrase;
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.knox.gateway.services.security.impl.AliasService#getAliasForCluster(java.lang.String, java.lang.String)
+ */
+ @Override
+ public char[] getPasswordFromAliasForCluster(String clusterName, String alias)
+ throws AliasServiceException {
+ return getPasswordFromAliasForCluster(clusterName, alias, false);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.knox.gateway.services.security.impl.AliasService#getAliasForCluster(java.lang.String, java.lang.String, boolean)
+ */
+ @Override
+ public char[] getPasswordFromAliasForCluster(String clusterName, String alias, boolean generate)
+ throws AliasServiceException {
+ char[] credential = null;
+ try {
+ credential = keystoreService.getCredentialForCluster(clusterName, alias);
+ if (credential == null) {
+ if (generate) {
+ generateAliasForCluster(clusterName, alias);
+ credential = keystoreService.getCredentialForCluster(clusterName, alias);
+ }
+ }
+ } catch (KeystoreServiceException e) {
+ LOG.failedToGetCredentialForCluster(clusterName, e);
+ throw new AliasServiceException(e);
+ }
+ return credential;
+ }
+
+ private String generatePassword(int length) {
- StringBuffer sb = new StringBuffer();
- Random r = new Random();
++ StringBuilder sb = new StringBuilder();
++ SecureRandom r = new SecureRandom();
+ for (int i = 0; i < length; i++) {
+ sb.append(chars[r.nextInt(chars.length)]);
+ }
+ return sb.toString();
+ }
-
++
+ public void setKeystoreService(KeystoreService ks) {
+ this.keystoreService = ks;
+ }
+
+ public void setMasterService(MasterService ms) {
+ this.masterService = ms;
-
++
+ }
+
+ @Override
+ public void generateAliasForCluster(String clusterName, String alias)
+ throws AliasServiceException {
+ try {
+ keystoreService.getCredentialStoreForCluster(clusterName);
+ } catch (KeystoreServiceException e) {
+ LOG.failedToGenerateAliasForCluster(clusterName, e);
+ throw new AliasServiceException(e);
+ }
+ String passwordString = generatePassword(16);
+ addAliasForCluster(clusterName, alias, passwordString);
+ }
+
+ /* (non-Javadoc)
+ * @see org.apache.knox.gateway.services.security.impl.AliasService#addAliasForCluster(java.lang.String, java.lang.String, java.lang.String)
+ */
+ @Override
+ public void addAliasForCluster(String clusterName, String alias, String value) {
+ try {
+ keystoreService.addCredentialForCluster(clusterName, alias, value);
+ } catch (KeystoreServiceException e) {
+ LOG.failedToAddCredentialForCluster(clusterName, e);
+ }
+ }
+
+ @Override
+ public void removeAliasForCluster(String clusterName, String alias)
+ throws AliasServiceException {
+ try {
+ keystoreService.removeCredentialForCluster(clusterName, alias);
+ } catch (KeystoreServiceException e) {
+ throw new AliasServiceException(e);
+ }
+ }
+
+ @Override
+ public char[] getPasswordFromAliasForGateway(String alias)
+ throws AliasServiceException {
+ return getPasswordFromAliasForCluster("__gateway", alias);
+ }
+
+ @Override
+ public void generateAliasForGateway(String alias)
+ throws AliasServiceException {
+ generateAliasForCluster("__gateway", alias);
+ }
+
+ /* (non-Javadoc)
+ * @see AliasService#getCertificateForGateway(java.lang.String)
+ */
+ @Override
+ public Certificate getCertificateForGateway(String alias) {
+ Certificate cert = null;
+ try {
+ cert = this.keystoreService.getKeystoreForGateway().getCertificate(alias);
+ } catch (KeyStoreException e) {
+ LOG.unableToRetrieveCertificateForGateway(e);
+ // should we throw an exception?
+ } catch (KeystoreServiceException e) {
+ LOG.unableToRetrieveCertificateForGateway(e);
+ }
+ return cert;
+ }
+
+ /* (non-Javadoc)
+ * @see AliasService#getAliasesForCluster(java.lang.String)
+ */
+ @Override
+ public List<String> getAliasesForCluster(String clusterName) {
+ ArrayList<String> list = new ArrayList<String>();
+ KeyStore keyStore;
+ try {
+ keyStore = keystoreService.getCredentialStoreForCluster(clusterName);
+ if (keyStore != null) {
+ String alias = null;
+ try {
+ Enumeration<String> e = keyStore.aliases();
+ while (e.hasMoreElements()) {
+ alias = e.nextElement();
+ // only include the metadata key names in the list of names
+ if (!alias.contains("@")) {
+ list.add(alias);
+ }
+ }
+ } catch (KeyStoreException e) {
+ LOG.failedToGetCredentialForCluster(clusterName, e);
+ }
+ }
+ } catch (KeystoreServiceException kse) {
+ LOG.failedToGetCredentialForCluster(clusterName, kse);
+ }
+ return list;
+ }
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
index c4a3914,0000000..16d5b81
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorHandler.java
@@@ -1,187 -1,0 +1,234 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.services.Service;
+import org.apache.knox.gateway.topology.discovery.DefaultServiceDiscoveryConfig;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscovery;
+import org.apache.knox.gateway.topology.discovery.ServiceDiscoveryFactory;
++import java.io.BufferedWriter;
++import java.io.File;
++import java.io.FileInputStream;
++import java.io.FileWriter;
++import java.io.InputStreamReader;
++import java.io.IOException;
++
++import java.net.URI;
++import java.net.URISyntaxException;
++
++import java.util.ArrayList;
++import java.util.Collections;
++import java.util.HashMap;
++import java.util.List;
++import java.util.Map;
+
- import java.io.*;
- import java.util.*;
+
+
+/**
+ * Processes simple topology descriptors, producing full topology files, which can subsequently be deployed to the
+ * gateway.
+ */
+public class SimpleDescriptorHandler {
+
+ private static final Service[] NO_GATEWAY_SERVICES = new Service[]{};
+
+ private static final SimpleDescriptorMessages log = MessagesFactory.get(SimpleDescriptorMessages.class);
+
+ public static Map<String, File> handle(File desc) throws IOException {
+ return handle(desc, NO_GATEWAY_SERVICES);
+ }
+
+ public static Map<String, File> handle(File desc, Service...gatewayServices) throws IOException {
+ return handle(desc, desc.getParentFile(), gatewayServices);
+ }
+
+ public static Map<String, File> handle(File desc, File destDirectory) throws IOException {
+ return handle(desc, destDirectory, NO_GATEWAY_SERVICES);
+ }
+
+ public static Map<String, File> handle(File desc, File destDirectory, Service...gatewayServices) throws IOException {
+ return handle(SimpleDescriptorFactory.parse(desc.getAbsolutePath()), desc.getParentFile(), destDirectory, gatewayServices);
+ }
+
+ public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory) {
+ return handle(desc, srcDirectory, destDirectory, NO_GATEWAY_SERVICES);
+ }
+
+ public static Map<String, File> handle(SimpleDescriptor desc, File srcDirectory, File destDirectory, Service...gatewayServices) {
+ Map<String, File> result = new HashMap<>();
+
+ File topologyDescriptor;
+
+ DefaultServiceDiscoveryConfig sdc = new DefaultServiceDiscoveryConfig(desc.getDiscoveryAddress());
+ sdc.setUser(desc.getDiscoveryUser());
+ sdc.setPasswordAlias(desc.getDiscoveryPasswordAlias());
- ServiceDiscovery sd = ServiceDiscoveryFactory
- .get(desc.getDiscoveryType(), gatewayServices);
++ ServiceDiscovery sd = ServiceDiscoveryFactory.get(desc.getDiscoveryType(), gatewayServices);
+ ServiceDiscovery.Cluster cluster = sd.discover(sdc, desc.getClusterName());
+
+ Map<String, List<String>> serviceURLs = new HashMap<>();
+
+ if (cluster != null) {
+ for (SimpleDescriptor.Service descService : desc.getServices()) {
+ String serviceName = descService.getName();
+
+ List<String> descServiceURLs = descService.getURLs();
+ if (descServiceURLs == null || descServiceURLs.isEmpty()) {
+ descServiceURLs = cluster.getServiceURLs(serviceName);
+ }
+
- // If there is at least one URL associated with the service, then add it to the map
++ // Validate the discovered service URLs
++ List<String> validURLs = new ArrayList<>();
+ if (descServiceURLs != null && !descServiceURLs.isEmpty()) {
- serviceURLs.put(serviceName, descServiceURLs);
++ // Validate the URL(s)
++ for (String descServiceURL : descServiceURLs) {
++ if (validateURL(serviceName, descServiceURL)) {
++ validURLs.add(descServiceURL);
++ }
++ }
++ }
++
++ // If there is at least one valid URL associated with the service, then add it to the map
++ if (!validURLs.isEmpty()) {
++ serviceURLs.put(serviceName, validURLs);
+ } else {
+ log.failedToDiscoverClusterServiceURLs(serviceName, cluster.getName());
- throw new IllegalStateException("ServiceDiscovery failed to resolve any URLs for " + serviceName +
- ". Topology update aborted!");
+ }
+ }
+ } else {
+ log.failedToDiscoverClusterServices(desc.getClusterName());
+ }
+
++ BufferedWriter fw = null;
+ topologyDescriptor = null;
+ File providerConfig = null;
+ try {
+ // Verify that the referenced provider configuration exists before attempting to reading it
+ providerConfig = resolveProviderConfigurationReference(desc.getProviderConfig(), srcDirectory);
+ if (providerConfig == null) {
+ log.failedToResolveProviderConfigRef(desc.getProviderConfig());
+ throw new IllegalArgumentException("Unresolved provider configuration reference: " +
+ desc.getProviderConfig() + " ; Topology update aborted!");
+ }
+ result.put("reference", providerConfig);
+
+ // TODO: Should the contents of the provider config be validated before incorporating it into the topology?
+
+ String topologyFilename = desc.getName();
+ if (topologyFilename == null) {
+ topologyFilename = desc.getClusterName();
+ }
+ topologyDescriptor = new File(destDirectory, topologyFilename + ".xml");
- FileWriter fw = new FileWriter(topologyDescriptor);
++ fw = new BufferedWriter(new FileWriter(topologyDescriptor));
+
+ fw.write("<topology>\n");
+
+ // Copy the externalized provider configuration content into the topology descriptor in-line
+ InputStreamReader policyReader = new InputStreamReader(new FileInputStream(providerConfig));
+ char[] buffer = new char[1024];
+ int count;
+ while ((count = policyReader.read(buffer)) > 0) {
+ fw.write(buffer, 0, count);
+ }
+ policyReader.close();
+
++ // Sort the service names to write the services alphabetically
++ List<String> serviceNames = new ArrayList<>(serviceURLs.keySet());
++ Collections.sort(serviceNames);
++
+ // Write the service declarations
- for (String serviceName : serviceURLs.keySet()) {
++ for (String serviceName : serviceNames) {
+ fw.write(" <service>\n");
+ fw.write(" <role>" + serviceName + "</role>\n");
+ for (String url : serviceURLs.get(serviceName)) {
+ fw.write(" <url>" + url + "</url>\n");
+ }
+ fw.write(" </service>\n");
+ }
+
+ fw.write("</topology>\n");
+
+ fw.flush();
- fw.close();
+ } catch (IOException e) {
+ log.failedToGenerateTopologyFromSimpleDescriptor(topologyDescriptor.getName(), e);
+ topologyDescriptor.delete();
++ } finally {
++ if (fw != null) {
++ try {
++ fw.close();
++ } catch (IOException e) {
++ // ignore
++ }
++ }
+ }
+
+ result.put("topology", topologyDescriptor);
+ return result;
+ }
+
++ private static boolean validateURL(String serviceName, String url) {
++ boolean result = false;
++
++ if (url != null && !url.isEmpty()) {
++ try {
++ new URI(url);
++ result = true;
++ } catch (URISyntaxException e) {
++ log.serviceURLValidationFailed(serviceName, url, e);
++ }
++ }
++
++ return result;
++ }
+
+ private static File resolveProviderConfigurationReference(String reference, File srcDirectory) {
+ File providerConfig;
+
+ // If the reference includes a path
+ if (reference.contains(File.separator)) {
+ // Check if it's an absolute path
+ providerConfig = new File(reference);
+ if (!providerConfig.exists()) {
+ // If it's not an absolute path, try treating it as a relative path
+ providerConfig = new File(srcDirectory, reference);
+ if (!providerConfig.exists()) {
+ providerConfig = null;
+ }
+ }
+ } else { // No file path, just a name
+ // Check if it's co-located with the referencing descriptor
+ providerConfig = new File(srcDirectory, reference);
+ if (!providerConfig.exists()) {
+ // Check the shared-providers config location
+ File sharedProvidersDir = new File(srcDirectory, "../shared-providers");
+ if (sharedProvidersDir.exists()) {
+ providerConfig = new File(sharedProvidersDir, reference);
+ if (!providerConfig.exists()) {
+ // Check if it's a valid name without the extension
+ providerConfig = new File(sharedProvidersDir, reference + ".xml");
+ if (!providerConfig.exists()) {
+ providerConfig = null;
+ }
+ }
+ }
+ }
+ }
+
+ return providerConfig;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
index eb9d887,0000000..07c4350
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/topology/simple/SimpleDescriptorMessages.java
@@@ -1,44 -1,0 +1,50 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.knox.gateway.topology.simple;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+
+@Messages(logger="org.apache.gateway.topology.simple")
+public interface SimpleDescriptorMessages {
+
+ @Message(level = MessageLevel.ERROR,
+ text = "Service discovery for cluster {0} failed.")
+ void failedToDiscoverClusterServices(final String cluster);
+
+ @Message(level = MessageLevel.ERROR,
- text = "No URLs were discovered for {0} in the {1} cluster.")
++ text = "No valid URLs were discovered for {0} in the {1} cluster.")
+ void failedToDiscoverClusterServiceURLs(final String serviceName, final String clusterName);
+
+ @Message(level = MessageLevel.ERROR,
+ text = "Failed to resolve the referenced provider configuration {0}.")
+ void failedToResolveProviderConfigRef(final String providerConfigRef);
+
+ @Message(level = MessageLevel.ERROR,
++ text = "URL validation failed for {0} URL {1} : {2}")
++ void serviceURLValidationFailed(final String serviceName,
++ final String url,
++ @StackTrace( level = MessageLevel.DEBUG ) Exception e );
++
++ @Message(level = MessageLevel.ERROR,
+ text = "Error generating topology {0} from simple descriptor: {1}")
+ void failedToGenerateTopologyFromSimpleDescriptor(final String topologyFile,
+ @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/8affbc02/gateway-server/src/main/java/org/apache/knox/gateway/websockets/GatewayWebsocketHandler.java
----------------------------------------------------------------------
diff --cc gateway-server/src/main/java/org/apache/knox/gateway/websockets/GatewayWebsocketHandler.java
index 3ddd311,0000000..69634a7
mode 100644,000000..100644
--- a/gateway-server/src/main/java/org/apache/knox/gateway/websockets/GatewayWebsocketHandler.java
+++ b/gateway-server/src/main/java/org/apache/knox/gateway/websockets/GatewayWebsocketHandler.java
@@@ -1,241 -1,0 +1,266 @@@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.websockets;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
++import java.util.List;
++import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.knox.gateway.config.GatewayConfig;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.service.definition.ServiceDefinition;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.registry.ServiceDefEntry;
+import org.apache.knox.gateway.services.registry.ServiceDefinitionRegistry;
+import org.apache.knox.gateway.services.registry.ServiceRegistry;
+import org.apache.knox.gateway.util.ServiceDefinitionsLoader;
+import org.eclipse.jetty.websocket.server.WebSocketHandler;
+import org.eclipse.jetty.websocket.servlet.ServletUpgradeRequest;
+import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse;
+import org.eclipse.jetty.websocket.servlet.WebSocketCreator;
+import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
+
++import javax.websocket.ClientEndpointConfig;
++
+/**
+ * Websocket handler that will handle websocket connection request. This class
+ * is responsible for creating a proxy socket for inbound and outbound
+ * connections. This is also where the http to websocket handoff happens.
- *
++ *
+ * @since 0.10
+ */
+public class GatewayWebsocketHandler extends WebSocketHandler
+ implements WebSocketCreator {
+
+ private static final WebsocketLogMessages LOG = MessagesFactory
+ .get(WebsocketLogMessages.class);
+
+ public static final String WEBSOCKET_PROTOCOL_STRING = "ws://";
+
+ public static final String SECURE_WEBSOCKET_PROTOCOL_STRING = "wss://";
+
+ static final String REGEX_SPLIT_CONTEXT = "^((?:[^/]*/){2}[^/]*)";
+
+ final static String REGEX_SPLIT_SERVICE_PATH = "^((?:[^/]*/){3}[^/]*)";
+
+ private static final int POOL_SIZE = 10;
+
+ /**
+ * Manage the threads that are spawned
+ * @since 0.13
+ */
+ private final ExecutorService pool;
+
+ final GatewayConfig config;
+ final GatewayServices services;
+
+ /**
+ * Create an instance
- *
++ *
+ * @param config
+ * @param services
+ */
+ public GatewayWebsocketHandler(final GatewayConfig config,
+ final GatewayServices services) {
+ super();
+
+ this.config = config;
+ this.services = services;
+ pool = Executors.newFixedThreadPool(POOL_SIZE);
+
+ }
+
+ /*
+ * (non-Javadoc)
- *
++ *
+ * @see
+ * org.eclipse.jetty.websocket.server.WebSocketHandler#configure(org.eclipse.
+ * jetty.websocket.servlet.WebSocketServletFactory)
+ */
+ @Override
+ public void configure(final WebSocketServletFactory factory) {
+ factory.setCreator(this);
+ factory.getPolicy()
+ .setMaxTextMessageSize(config.getWebsocketMaxTextMessageSize());
+ factory.getPolicy()
+ .setMaxBinaryMessageSize(config.getWebsocketMaxBinaryMessageSize());
+
+ factory.getPolicy().setMaxBinaryMessageBufferSize(
+ config.getWebsocketMaxBinaryMessageBufferSize());
+ factory.getPolicy().setMaxTextMessageBufferSize(
+ config.getWebsocketMaxTextMessageBufferSize());
+
+ factory.getPolicy()
+ .setInputBufferSize(config.getWebsocketInputBufferSize());
+
+ factory.getPolicy()
+ .setAsyncWriteTimeout(config.getWebsocketAsyncWriteTimeout());
+ factory.getPolicy().setIdleTimeout(config.getWebsocketIdleTimeout());
+
+ }
+
+ /*
+ * (non-Javadoc)
- *
++ *
+ * @see
+ * org.eclipse.jetty.websocket.servlet.WebSocketCreator#createWebSocket(org.
+ * eclipse.jetty.websocket.servlet.ServletUpgradeRequest,
+ * org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse)
+ */
+ @Override
+ public Object createWebSocket(ServletUpgradeRequest req,
+ ServletUpgradeResponse resp) {
+
+ try {
+ final URI requestURI = req.getRequestURI();
+ final String path = requestURI.getPath();
+
+ /* URL used to connect to websocket backend */
+ final String backendURL = getMatchedBackendURL(path);
+
+ /* Upgrade happens here */
- return new ProxyWebSocketAdapter(URI.create(backendURL), pool);
++ return new ProxyWebSocketAdapter(URI.create(backendURL), pool, getClientEndpointConfig(req));
+ } catch (final Exception e) {
+ LOG.failedCreatingWebSocket(e);
+ throw e;
+ }
+ }
+
+ /**
++ * Returns a {@link ClientEndpointConfig} config that contains the headers
++ * to be passed to the backend.
++ * @since 0.14.0
++ * @param req
++ * @return
++ */
++ private ClientEndpointConfig getClientEndpointConfig(final ServletUpgradeRequest req) {
++
++ return ClientEndpointConfig.Builder.create().configurator( new ClientEndpointConfig.Configurator() {
++
++ @Override
++ public void beforeRequest(final Map<String, List<String>> headers) {
++
++ /* Add request headers */
++ req.getHeaders().forEach(headers::putIfAbsent);
++
++ }
++ }).build();
++ }
++
++ /**
+ * This method looks at the context path and returns the backend websocket
+ * url. If websocket url is found it is used as is, or we default to
+ * ws://{host}:{port} which might or might not be right.
- *
- * @param The context path
++ *
++ * @param
+ * @return Websocket backend url
+ */
+ private synchronized String getMatchedBackendURL(final String path) {
+
+ final ServiceRegistry serviceRegistryService = services
+ .getService(GatewayServices.SERVICE_REGISTRY_SERVICE);
+
+ final ServiceDefinitionRegistry serviceDefinitionService = services
+ .getService(GatewayServices.SERVICE_DEFINITION_REGISTRY);
+
+ /* Filter out the /cluster/topology to get the context we want */
+ String[] pathInfo = path.split(REGEX_SPLIT_CONTEXT);
+
+ final ServiceDefEntry entry = serviceDefinitionService
+ .getMatchingService(pathInfo[1]);
+
+ if (entry == null) {
+ throw new RuntimeException(
+ String.format("Cannot find service for the given path: %s", path));
+ }
+
+ /* Filter out /cluster/topology/service to get endpoint */
+ String[] pathService = path.split(REGEX_SPLIT_SERVICE_PATH);
+
+ final File servicesDir = new File(config.getGatewayServicesDir());
+
+ final Set<ServiceDefinition> serviceDefs = ServiceDefinitionsLoader
+ .getServiceDefinitions(servicesDir);
+
+ /* URL used to connect to websocket backend */
+ String backendURL = urlFromServiceDefinition(serviceDefs,
+ serviceRegistryService, entry, path);
+
+ StringBuffer backend = new StringBuffer();
+ try {
+
+ /* if we do not find websocket URL we default to HTTP */
+ if (!StringUtils.containsAny(backendURL, WEBSOCKET_PROTOCOL_STRING, SECURE_WEBSOCKET_PROTOCOL_STRING)) {
+ URL serviceUrl = new URL(backendURL);
+
+ /* Use http host:port if ws url not configured */
+ final String protocol = (serviceUrl.getProtocol() == "ws"
+ || serviceUrl.getProtocol() == "wss") ? serviceUrl.getProtocol()
+ : "ws";
+ backend.append(protocol).append("://");
+ backend.append(serviceUrl.getHost()).append(":");
+ backend.append(serviceUrl.getPort()).append("/");
+ backend.append(serviceUrl.getPath());
+ }
+ else {
+ URI serviceUri = new URI(backendURL);
+ backend.append(serviceUri);
+ /* Avoid Zeppelin Regression - as this would require ambari changes and break current knox websocket use case*/
- if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService[1] != null) {
++ if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService.length > 0 && pathService[1] != null) {
+ backend.append(pathService[1]);
+ }
+ }
+ backendURL = backend.toString();
+
+ } catch (MalformedURLException e){
+ LOG.badUrlError(e);
+ throw new RuntimeException(e.toString());
+ } catch (Exception e1) {
+ LOG.failedCreatingWebSocket(e1);
+ throw new RuntimeException(e1.toString());
+ }
+
+ return backendURL;
+ }
+
+ private static String urlFromServiceDefinition(
+ final Set<ServiceDefinition> serviceDefs,
+ final ServiceRegistry serviceRegistry, final ServiceDefEntry entry,
+ final String path) {
+
+ final String[] contexts = path.split("/");
+
+ final String serviceURL = serviceRegistry.lookupServiceURL(contexts[2],
+ entry.getName().toUpperCase());
+
+ /*
+ * we have a match, if ws:// is present it is returned else http:// is
+ * returned
+ */
+ return serviceURL;
+
+ }
+
+}
[20/23] knox git commit: KNOX-1081 - Manually remove the picketlink
folder because of a merge issue
Posted by mo...@apache.org.
KNOX-1081 - Manually remove the picketlink folder because of a merge issue
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/78d35f16
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/78d35f16
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/78d35f16
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 78d35f16f84f91985bbbd7582d79f648e4ef76e9
Parents: 8affbc0
Author: Sandeep More <mo...@apache.org>
Authored: Mon Oct 16 10:55:24 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Mon Oct 16 10:55:24 2017 -0400
----------------------------------------------------------------------
.../gateway/picketlink/PicketlinkMessages.java | 0
.../picketlink/deploy/PicketlinkConf.java | 0
.../PicketlinkFederationProviderContributor.java | 0
.../filter/CaptureOriginalURLFilter.java | 0
.../filter/PicketlinkIdentityAdapter.java | 0
....gateway.deploy.ProviderDeploymentContributor | 19 -------------------
.../knox/gateway/picketlink/PicketlinkTest.java | 0
7 files changed, 19 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/PicketlinkMessages.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkConf.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/deploy/PicketlinkFederationProviderContributor.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/CaptureOriginalURLFilter.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java b/gateway-provider-security-picketlink/src/main/java/org/apache/knox/gateway/picketlink/filter/PicketlinkIdentityAdapter.java
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index 2d6b75c..0000000
--- a/gateway-provider-security-picketlink/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.knox.gateway.picketlink.deploy.PicketlinkFederationProviderContributor
http://git-wip-us.apache.org/repos/asf/knox/blob/78d35f16/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java b/gateway-provider-security-picketlink/src/test/java/org/apache/knox/gateway/picketlink/PicketlinkTest.java
deleted file mode 100644
index e69de29..0000000
[12/23] knox git commit: KNOX-895 - Pass Headers and Cookies to
websocket backend
Posted by mo...@apache.org.
KNOX-895 - Pass Headers and Cookies to websocket backend
Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/2d236e78
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/2d236e78
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/2d236e78
Branch: refs/heads/KNOX-998-Package_Restructuring
Commit: 2d236e78b70ef7fb312ebf0fa198657595e2f4ba
Parents: 7b401de
Author: Sandeep More <mo...@apache.org>
Authored: Wed Oct 11 17:04:52 2017 -0400
Committer: Sandeep More <mo...@apache.org>
Committed: Wed Oct 11 17:04:52 2017 -0400
----------------------------------------------------------------------
.../websockets/GatewayWebsocketHandler.java | 41 +-
.../gateway/websockets/ProxyInboundClient.java | 107 ++++++
.../websockets/ProxyWebSocketAdapter.java | 20 +-
.../websockets/ProxyInboundClientTest.java | 374 +++++++++++++++++++
4 files changed, 530 insertions(+), 12 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
index 75a4a2b..0ee54fd 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/GatewayWebsocketHandler.java
@@ -21,6 +21,8 @@ import java.io.File;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
+import java.util.List;
+import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
@@ -40,11 +42,13 @@ import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse;
import org.eclipse.jetty.websocket.servlet.WebSocketCreator;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
+import javax.websocket.ClientEndpointConfig;
+
/**
* Websocket handler that will handle websocket connection request. This class
* is responsible for creating a proxy socket for inbound and outbound
* connections. This is also where the http to websocket handoff happens.
- *
+ *
* @since 0.10
*/
public class GatewayWebsocketHandler extends WebSocketHandler
@@ -74,7 +78,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
/**
* Create an instance
- *
+ *
* @param config
* @param services
*/
@@ -90,7 +94,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
/*
* (non-Javadoc)
- *
+ *
* @see
* org.eclipse.jetty.websocket.server.WebSocketHandler#configure(org.eclipse.
* jetty.websocket.servlet.WebSocketServletFactory)
@@ -119,7 +123,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
/*
* (non-Javadoc)
- *
+ *
* @see
* org.eclipse.jetty.websocket.servlet.WebSocketCreator#createWebSocket(org.
* eclipse.jetty.websocket.servlet.ServletUpgradeRequest,
@@ -137,7 +141,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
final String backendURL = getMatchedBackendURL(path);
/* Upgrade happens here */
- return new ProxyWebSocketAdapter(URI.create(backendURL), pool);
+ return new ProxyWebSocketAdapter(URI.create(backendURL), pool, getClientEndpointConfig(req));
} catch (final Exception e) {
LOG.failedCreatingWebSocket(e);
throw e;
@@ -145,11 +149,32 @@ public class GatewayWebsocketHandler extends WebSocketHandler
}
/**
+ * Returns a {@link ClientEndpointConfig} config that contains the headers
+ * to be passed to the backend.
+ * @since 0.14.0
+ * @param req
+ * @return
+ */
+ private ClientEndpointConfig getClientEndpointConfig(final ServletUpgradeRequest req) {
+
+ return ClientEndpointConfig.Builder.create().configurator( new ClientEndpointConfig.Configurator() {
+
+ @Override
+ public void beforeRequest(final Map<String, List<String>> headers) {
+
+ /* Add request headers */
+ req.getHeaders().forEach(headers::putIfAbsent);
+
+ }
+ }).build();
+ }
+
+ /**
* This method looks at the context path and returns the backend websocket
* url. If websocket url is found it is used as is, or we default to
* ws://{host}:{port} which might or might not be right.
- *
- * @param The context path
+ *
+ * @param
* @return Websocket backend url
*/
private synchronized String getMatchedBackendURL(final String path) {
@@ -203,7 +228,7 @@ public class GatewayWebsocketHandler extends WebSocketHandler
URI serviceUri = new URI(backendURL);
backend.append(serviceUri);
/* Avoid Zeppelin Regression - as this would require ambari changes and break current knox websocket use case*/
- if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService[1] != null) {
+ if (!StringUtils.endsWith(backend.toString(), "/ws") && pathService.length > 0 && pathService[1] != null) {
backend.append(pathService[1]);
}
}
http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
new file mode 100644
index 0000000..4e938d2
--- /dev/null
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyInboundClient.java
@@ -0,0 +1,107 @@
+package org.apache.hadoop.gateway.websockets;
+
+import javax.websocket.CloseReason;
+import javax.websocket.Endpoint;
+import javax.websocket.EndpointConfig;
+import javax.websocket.MessageHandler;
+import javax.websocket.Session;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/**
+ * A Websocket client with callback which is not annotation based.
+ * This handler accepts String and binary messages.
+ * @since 0.14.0
+ */
+public class ProxyInboundClient extends Endpoint {
+
+ /**
+ * Callback to be called once we have events on our socket.
+ */
+ private MessageEventCallback callback;
+
+ protected Session session;
+ protected EndpointConfig config;
+
+
+ public ProxyInboundClient(final MessageEventCallback callback) {
+ super();
+ this.callback = callback;
+ }
+
+ /**
+ * Developers must implement this method to be notified when a new
+ * conversation has just begun.
+ *
+ * @param backendSession the session that has just been activated.
+ * @param config the configuration used to configure this endpoint.
+ */
+ @Override
+ public void onOpen(final javax.websocket.Session backendSession, final EndpointConfig config) {
+ this.session = backendSession;
+ this.config = config;
+
+ /* Set the max message size */
+ session.setMaxBinaryMessageBufferSize(Integer.MAX_VALUE);
+ session.setMaxTextMessageBufferSize(Integer.MAX_VALUE);
+
+ /* Add message handler for binary data */
+ session.addMessageHandler(new MessageHandler.Whole<byte[]>() {
+
+ /**
+ * Called when the message has been fully received.
+ *
+ * @param message the message data.
+ */
+ @Override
+ public void onMessage(final byte[] message) {
+ callback.onMessageBinary(message, true, session);
+ }
+
+ });
+
+ /* Add message handler for text data */
+ session.addMessageHandler(new MessageHandler.Whole<String>() {
+
+ /**
+ * Called when the message has been fully received.
+ *
+ * @param message the message data.
+ */
+ @Override
+ public void onMessage(final String message) {
+ callback.onMessageText(message, session);
+ }
+
+ });
+
+ callback.onConnectionOpen(backendSession);
+ }
+
+ @Override
+ public void onClose(final javax.websocket.Session backendSession, final CloseReason closeReason) {
+ callback.onConnectionClose(closeReason);
+ this.session = null;
+ }
+
+ @Override
+ public void onError(final javax.websocket.Session backendSession, final Throwable cause) {
+ callback.onError(cause);
+ this.session = null;
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
index 1e7f583..4ea8d6c 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/websockets/ProxyWebSocketAdapter.java
@@ -20,8 +20,8 @@ package org.apache.hadoop.gateway.websockets;
import java.io.IOException;
import java.net.URI;
import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
+import javax.websocket.ClientEndpointConfig;
import javax.websocket.CloseReason;
import javax.websocket.ContainerProvider;
import javax.websocket.DeploymentException;
@@ -60,12 +60,23 @@ public class ProxyWebSocketAdapter extends WebSocketAdapter {
private ExecutorService pool;
/**
+ * Used to transmit headers from browser to backend server.
+ * @since 0.14
+ */
+ private ClientEndpointConfig clientConfig;
+
+ /**
* Create an instance
*/
public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool) {
+ this(backend, pool, null);
+ }
+
+ public ProxyWebSocketAdapter(final URI backend, final ExecutorService pool, final ClientEndpointConfig clientConfig) {
super();
this.backend = backend;
this.pool = pool;
+ this.clientConfig = clientConfig;
}
@Override
@@ -76,14 +87,15 @@ public class ProxyWebSocketAdapter extends WebSocketAdapter {
* plumbing takes place
*/
container = ContainerProvider.getWebSocketContainer();
- final ProxyInboundSocket backendSocket = new ProxyInboundSocket(
- getMessageCallback());
+
+ final ProxyInboundClient backendSocket = new ProxyInboundClient(getMessageCallback());
/* build the configuration */
/* Attempt Connect */
try {
- backendSession = container.connectToServer(backendSocket, backend);
+ backendSession = container.connectToServer(backendSocket, clientConfig, backend);
+
LOG.onConnectionOpen(backend.toString());
} catch (DeploymentException e) {
http://git-wip-us.apache.org/repos/asf/knox/blob/2d236e78/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
----------------------------------------------------------------------
diff --git a/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
new file mode 100644
index 0000000..69b45dd
--- /dev/null
+++ b/gateway-server/src/test/java/org/apache/hadoop/gateway/websockets/ProxyInboundClientTest.java
@@ -0,0 +1,374 @@
+package org.apache.hadoop.gateway.websockets;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.ContextHandler;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import javax.websocket.CloseReason;
+import javax.websocket.ContainerProvider;
+import javax.websocket.DeploymentException;
+import javax.websocket.Session;
+import javax.websocket.WebSocketContainer;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.ByteBuffer;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.Matchers.instanceOf;
+
+/**
+ * Test {@link ProxyInboundClient} class.
+ * @since 0.14.0
+ */
+public class ProxyInboundClientTest {
+
+ private static Server server;
+ private static URI serverUri;
+ private static Handler handler;
+
+ String recievedMessage = null;
+
+ byte[] recievedBinaryMessage = null;
+
+
+ /* create an instance */
+ public ProxyInboundClientTest() {
+ super();
+ }
+
+ @BeforeClass
+ public static void startWSServer() throws Exception
+ {
+ server = new Server();
+ ServerConnector connector = new ServerConnector(server);
+ server.addConnector(connector);
+
+ handler = new WebsocketEchoHandler();
+
+ ContextHandler context = new ContextHandler();
+ context.setContextPath("/");
+ context.setHandler(handler);
+ server.setHandler(context);
+
+ server.start();
+
+ String host = connector.getHost();
+ if (host == null)
+ {
+ host = "localhost";
+ }
+ int port = connector.getLocalPort();
+ serverUri = new URI(String.format("ws://%s:%d/",host,port));
+ }
+
+ @AfterClass
+ public static void stopServer()
+ {
+ try
+ {
+ server.stop();
+ }
+ catch (Exception e)
+ {
+ e.printStackTrace(System.err);
+ }
+ }
+
+ //@Test(timeout = 3000)
+ @Test
+ public void testClientInstance() throws IOException, DeploymentException {
+
+ final String textMessage = "Echo";
+ final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendText(textMessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals("The received text message is not the same as the sent", textMessage, recievedMessage);
+ }
+
+ @Test(timeout = 3000)
+ public void testBinarymessage() throws IOException, DeploymentException {
+
+ final String textMessage = "Echo";
+ final ByteBuffer binarymessage = ByteBuffer.wrap(textMessage.getBytes());
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+ recievedBinaryMessage = message;
+ isTestComplete.set(true);
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendBinary(binarymessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals("Binary message does not match", textMessage, new String(recievedBinaryMessage));
+ }
+
+ @Test(timeout = 3000)
+ public void testTextMaxBufferLimit() throws IOException, DeploymentException {
+
+ final String longMessage = RandomStringUtils.random(100000);
+
+ final AtomicBoolean isTestComplete = new AtomicBoolean(false);
+
+ final WebSocketContainer container = ContainerProvider.getWebSocketContainer();
+ final ProxyInboundClient client = new ProxyInboundClient( new MessageEventCallback() {
+
+ /**
+ * A generic callback, can be left un-implemented
+ *
+ * @param message
+ */
+ @Override
+ public void doCallback(String message) {
+
+ }
+
+ /**
+ * Callback when connection is established.
+ *
+ * @param session
+ */
+ @Override
+ public void onConnectionOpen(Object session) {
+
+ }
+
+ /**
+ * Callback when connection is closed.
+ *
+ * @param reason
+ */
+ @Override
+ public void onConnectionClose(CloseReason reason) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when there is an error in connection.
+ *
+ * @param cause
+ */
+ @Override
+ public void onError(Throwable cause) {
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a text message is received.
+ *
+ * @param message
+ * @param session
+ */
+ @Override
+ public void onMessageText(String message, Object session) {
+ recievedMessage = message;
+ isTestComplete.set(true);
+ }
+
+ /**
+ * Callback when a binary message is received.
+ *
+ * @param message
+ * @param last
+ * @param session
+ */
+ @Override
+ public void onMessageBinary(byte[] message, boolean last,
+ Object session) {
+
+ }
+ } );
+
+ Assert.assertThat(client, instanceOf(javax.websocket.Endpoint.class));
+
+ Session session = container.connectToServer(client, serverUri);
+
+ session.getBasicRemote().sendText(longMessage);
+
+ while(!isTestComplete.get()) {
+ /* just wait for the test to finish */
+ }
+
+ Assert.assertEquals(longMessage, recievedMessage);
+
+ }
+
+
+
+}