You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ranger.apache.org by sn...@apache.org on 2014/10/09 00:15:29 UTC

git commit: ARGUS-9: Adding Test Connection Support for Storm Repo

Repository: incubator-argus
Updated Branches:
  refs/heads/master 49892c0d1 -> 7b1b3e436


ARGUS-9: Adding Test Connection Support for Storm Repo

Signed-off-by: sneethiraj <sn...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-argus/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-argus/commit/7b1b3e43
Tree: http://git-wip-us.apache.org/repos/asf/incubator-argus/tree/7b1b3e43
Diff: http://git-wip-us.apache.org/repos/asf/incubator-argus/diff/7b1b3e43

Branch: refs/heads/master
Commit: 7b1b3e4365c35ffd80c3cba1f49d16592eafc64e
Parents: 49892c0
Author: vperiasamy <vp...@hortonworks.com>
Authored: Wed Oct 8 17:56:04 2014 -0400
Committer: sneethiraj <sn...@apache.org>
Committed: Wed Oct 8 18:15:06 2014 -0400

----------------------------------------------------------------------
 .../hadoop/client/config/BaseClient.java        |  12 +-
 .../com/xasecure/storm/client/StormClient.java  | 209 +++++++++++++++++--
 .../main/java/com/xasecure/biz/AssetMgr.java    |   2 +
 .../webapp/scripts/views/asset/AssetCreate.js   |   5 +-
 .../webapp/templates/asset/AssetForm_tmpl.html  |   2 +-
 5 files changed, 202 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7b1b3e43/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
index 6d018df..1f8ae47 100644
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
@@ -20,10 +20,13 @@
  package com.xasecure.hadoop.client.config;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 
 import javax.security.auth.Subject;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.security.SecureClientLogin;
@@ -146,13 +149,14 @@ public abstract class BaseClient {
 	}
 
 	public static String getMessage(Throwable excp) {
-		StringBuilder sb = new StringBuilder();
+		List<String> errList = new ArrayList<String>();
 		while (excp != null) {
-			sb.append(excp.getMessage()).append("\n");
+			if (!errList.contains(excp.getMessage() + "\n")) {
+				errList.add(excp.getMessage() + "\n");
+			}
 			excp = excp.getCause();
 		}
-
-		return sb.toString();
+		return StringUtils.join(errList, ". ");
 	}
 	
 }

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7b1b3e43/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java b/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
index f3b5a3b..35dc2db 100644
--- a/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
+++ b/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
@@ -28,6 +28,7 @@ import java.util.Map;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
 import javax.security.auth.login.LoginContext;
 import javax.security.auth.login.LoginException;
 
@@ -41,11 +42,11 @@ import com.google.gson.GsonBuilder;
 import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
+import com.xasecure.hadoop.client.config.BaseClient;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
 import com.xasecure.storm.client.json.model.Topology;
 import com.xasecure.storm.client.json.model.TopologyListResponse;
 
-
-
 public class StormClient {
 	
 	public static final Logger LOG = Logger.getLogger(StormClient.class) ;
@@ -73,6 +74,12 @@ public class StormClient {
 
 	public List<String> getTopologyList(final String topologyNameMatching) {
 		
+		LOG.debug("Getting Storm topology list for topologyNameMatching : " +
+				topologyNameMatching);
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		
 		List<String> ret = new ArrayList<String>();
 		
 		PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
@@ -94,7 +101,10 @@ public class StormClient {
 					response = webResource.accept(EXPECTED_MIME_TYPE)
 						    .get(ClientResponse.class);
 					
+					LOG.info("getTopologyList():calling " + url);
+					
 					if (response != null) {
+						LOG.info("getTopologyList():response.getStatus()= " + response.getStatus());	
 						if (response.getStatus() == 200) {
 							String jsonString = response.getEntity(String.class);
 							Gson gson = new GsonBuilder().setPrettyPrinting().create();
@@ -111,12 +121,36 @@ public class StormClient {
 									}
 								}
 							}
-							
+						} else{
+							LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL " + url);	
+							String jsonString = response.getEntity(String.class);
+							LOG.info(jsonString);
 						}
+					} else {
+						String msgDesc = "Unable to get a valid response for "
+								+ "expected mime type : [" + EXPECTED_MIME_TYPE
+								+ "] URL : " + url + " - got null response.";
+						LOG.error(msgDesc);
+						HadoopException hdpException = new HadoopException(msgDesc);
+						hdpException.generateResponseDataMap(false, msgDesc,
+								msgDesc + errMsg, null, null);
+						throw hdpException;
 					}
-				}
-				finally {
+				} catch (HadoopException he) {
+					throw he;
+				} catch (Throwable t) {
+					String msgDesc = "Exception while getting Storm TopologyList."
+							+ " URL : " + url;
+					HadoopException hdpException = new HadoopException(msgDesc,
+							t);
+					LOG.error(msgDesc, t);
+
+					hdpException.generateResponseDataMap(false,
+							BaseClient.getMessage(t), msgDesc + errMsg, null,
+							null);
+					throw hdpException;
 					
+				} finally {
 					if (response != null) {
 						response.close();
 					}
@@ -126,7 +160,6 @@ public class StormClient {
 					}
 				
 				}
-				
 				return lret ;
 			}
 		} ;
@@ -140,13 +173,12 @@ public class StormClient {
 		return ret;
 	}
 	
-	
-	
-	
-
 	public static <T> T executeUnderKerberos(String userName, String password,
 			PrivilegedAction<T> action) throws IOException {
-
+		
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		class MySecureClientLoginConfiguration extends
 				javax.security.auth.login.Configuration {
 
@@ -165,26 +197,43 @@ public class StormClient {
 
 				Map<String, String> kerberosOptions = new HashMap<String, String>();
 				kerberosOptions.put("principal", this.userName);
-				kerberosOptions.put("debug", "false");
+				kerberosOptions.put("debug", "true");
 				kerberosOptions.put("useKeyTab", "false");
 				kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, this.userName);
 				kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, this.password);
-				kerberosOptions.put("doNotPrompt", "true");
+				kerberosOptions.put("doNotPrompt", "false");
 				kerberosOptions.put("useFirstPass", "true");
-				kerberosOptions.put("tryFirstPass","false") ;
+				kerberosOptions.put("tryFirstPass", "false");
 				kerberosOptions.put("storeKey", "true");
 				kerberosOptions.put("refreshKrb5Config", "true");
 
+				AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = null;
+				AppConfigurationEntry KERBEROS_PWD_SAVER = null;
+				try {
+					KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
+							KerberosUtil.getKrb5LoginModuleName(),
+							AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
+							kerberosOptions);
+					KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
 
+				} catch (IllegalArgumentException e) {
+					String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
+					HadoopException hdpException = new HadoopException(msgDesc,
+							e);
+					LOG.error(msgDesc, e);
 
-				AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
-						KerberosUtil.getKrb5LoginModuleName(),
-						AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, kerberosOptions);
-				return new AppConfigurationEntry[] { KEYTAB_KERBEROS_LOGIN };
+					hdpException.generateResponseDataMap(false,
+							BaseClient.getMessage(e), msgDesc + errMsg, null,
+							null);
+					throw hdpException;
+				}
+                
+				LOG.info("getAppConfigurationEntry():" + kerberosOptions.get("principal"));
+				
+                return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
 			}
 
-		}
-		;
+		};
 
 		T ret = null;
 
@@ -192,11 +241,16 @@ public class StormClient {
 		LoginContext loginContext = null;
 
 		try {
-			subject = new Subject();
+		    subject = new Subject();
+			LOG.info("executeUnderKerberos():user=" + userName + ",pass=" + password);
+			LOG.info("executeUnderKerberos():Creating config..");
 			MySecureClientLoginConfiguration loginConf = new MySecureClientLoginConfiguration(
 					userName, password);
+			LOG.info("executeUnderKerberos():Creating Context..");
 			loginContext = new LoginContext("hadoop-keytab-kerberos", subject,
 					null, loginConf);
+			
+			LOG.info("executeUnderKerberos():Logging in..");
 			loginContext.login();
 
 			Subject loginSubj = loginContext.getSubject();
@@ -205,7 +259,23 @@ public class StormClient {
 				ret = Subject.doAs(loginSubj, action);
 			}
 		} catch (LoginException le) {
-			throw new IOException("Login failure", le);
+			String msgDesc = "executeUnderKerberos: Login failure using given"
+					+ " configuration parameters, username : `" + userName + "`.";
+			HadoopException hdpException = new HadoopException(msgDesc, le);
+			LOG.error(msgDesc, le);
+
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(le), msgDesc + errMsg, null, null);
+			throw hdpException;
+		} catch (SecurityException se) {
+			String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
+			HadoopException hdpException = new HadoopException(msgDesc, se);
+			LOG.error(msgDesc, se);
+
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(se), msgDesc + errMsg, null, null);
+			throw hdpException;
+
 		} finally {
 			if (loginContext != null) {
 				if (subject != null) {
@@ -220,4 +290,99 @@ public class StormClient {
 
 		return ret;
 	}
+
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		List<String> strList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		boolean connectivityStatus = false;
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+
+		StormClient stormClient = getStormClient(dataSource,
+				connectionProperties);
+		strList = getStormResources(stormClient, "");
+
+		if (strList != null && (strList.size() != 0)) {
+			connectivityStatus = true;
+		}
+
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			BaseClient.generateResponseDataMap(connectivityStatus, successMsg,
+					successMsg, null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrive any topologies using given parameters.";
+			BaseClient.generateResponseDataMap(connectivityStatus, failureMsg,
+					failureMsg + errMsg, null, null, responseData);
+		}
+
+		return responseData;
+	}
+
+	public static StormClient getStormClient(String dataSourceName,
+			Map<String, String> configMap) {
+		StormClient stormClient = null;
+		LOG.debug("Getting StormClient for datasource: " + dataSourceName
+				+ "configMap: " + configMap);
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (configMap == null || configMap.isEmpty()) {
+			String msgDesc = "Could not connect as Connection ConfigMap is empty.";
+			LOG.error(msgDesc);
+			HadoopException hdpException = new HadoopException(msgDesc);
+			hdpException.generateResponseDataMap(false, msgDesc, msgDesc
+					+ errMsg, null, null);
+			throw hdpException;
+		} else {
+			String stormUrl = configMap.get("nimbus.url");
+			String stormAdminUser = configMap.get("username");
+			String stormAdminPassword = configMap.get("password");
+			stormClient = new StormClient(stormUrl, stormAdminUser,
+					stormAdminPassword);
+		}
+		return stormClient;
+	}
+
+	public static List<String> getStormResources(final StormClient stormClient,
+			String topologyName) {
+
+		List<String> resultList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+
+		try {
+			if (stormClient == null) {
+				String msgDesc = "Unable to get Storm resources: StormClient is null.";
+				LOG.error(msgDesc);
+				HadoopException hdpException = new HadoopException(msgDesc);
+				hdpException.generateResponseDataMap(false, msgDesc, msgDesc
+						+ errMsg, null, null);
+				throw hdpException;
+			}
+
+			if (topologyName != null) {
+				String finalTopologyNameMatching = (topologyName == null) ? ""
+						: topologyName.trim();
+				resultList = stormClient
+						.getTopologyList(finalTopologyNameMatching);
+			}
+		} catch (HadoopException he) {
+			throw he;
+		} catch (Exception e) {
+			String msgDesc = "getStormResources: Unable to get Storm resources.";
+			LOG.error(msgDesc, e);
+			HadoopException hdpException = new HadoopException(msgDesc);
+
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(e), msgDesc + errMsg, null, null);
+			throw hdpException;
+		}
+		return resultList;
+	}
+	
 }

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7b1b3e43/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java b/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
index 10d3d4d..125dd0a 100644
--- a/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
+++ b/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
@@ -1655,6 +1655,8 @@ public class AssetMgr extends AssetMgrBase {
 				// connectivityStatus = true;
 				// }
 				responseData = KnoxClient.testConnection(dataSource, configMap);
+			} else if (assetType == AppConstants.ASSET_STORM) {
+				responseData = StormClient.testConnection(dataSource, configMap);
 			} else {
 				throw restErrorUtil.createRESTException(
 						"Invalid repository type.",

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7b1b3e43/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js b/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
index 9f0c5a9..9df4138 100644
--- a/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
+++ b/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
@@ -249,7 +249,10 @@ define(function(require){
 									}]
 									);
 								}else{
-									bootbox.alert(msResponse.msgDesc);	
+									if(!_.isEmpty(msResponse.msgDesc))
+										bootbox.alert(msResponse.msgDesc);
+									else
+										bootbox.alert("Connection Problem.");
 								}
 							}else{
 								bootbox.alert("Connection Problem.");

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/7b1b3e43/security-admin/src/main/webapp/templates/asset/AssetForm_tmpl.html
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/templates/asset/AssetForm_tmpl.html b/security-admin/src/main/webapp/templates/asset/AssetForm_tmpl.html
index 9b02a06..1606265 100644
--- a/security-admin/src/main/webapp/templates/asset/AssetForm_tmpl.html
+++ b/security-admin/src/main/webapp/templates/asset/AssetForm_tmpl.html
@@ -37,7 +37,7 @@
 		<!-- div class="" data-fields="pair"></div-->
 	</fieldset>
 	<div>
-		<div class="control-group field-username hdfs hive knox" style="display: block;">
+		<div class="control-group field-username hdfs hive knox storm" style="display: block;">
 			<label class="control-label" for="c12_username"></label>
 			<div class="controls">
 				<span data-editor="">