Posted to commits@ranger.apache.org by sn...@apache.org on 2014/10/05 06:25:22 UTC

git commit: ARGUS-92: Added specific messages for Test Connection Failure

Repository: incubator-argus
Updated Branches:
  refs/heads/master 01fb970fe -> d95b51934


ARGUS-92: Added specific messages for Test Connection Failure

Signed-off-by: sneethiraj <sn...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/incubator-argus/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-argus/commit/d95b5193
Tree: http://git-wip-us.apache.org/repos/asf/incubator-argus/tree/d95b5193
Diff: http://git-wip-us.apache.org/repos/asf/incubator-argus/diff/d95b5193

Branch: refs/heads/master
Commit: d95b51934149cee33b84ec8a5c349c3a6ca47ffb
Parents: 01fb970
Author: vperiasamy <vp...@hortonworks.com>
Authored: Sat Oct 4 22:29:39 2014 -0400
Committer: sneethiraj <sn...@apache.org>
Committed: Sat Oct 4 23:25:05 2014 -0500

----------------------------------------------------------------------
 .../com/xasecure/hadoop/client/HadoopFS.java    |  69 ++++++-
 .../hadoop/client/config/BaseClient.java        |  52 ++++-
 .../client/exceptions/HadoopException.java      |  13 ++
 .../com/xasecure/hbase/client/HBaseClient.java  | 195 +++++++++++++++++--
 .../com/xasecure/hive/client/HiveClient.java    | 176 ++++++++++++++---
 .../com/xasecure/knox/client/KnoxClient.java    | 186 +++++++++++++++++-
 .../main/java/com/xasecure/biz/AssetMgr.java    | 135 +++++++++----
 .../webapp/scripts/views/asset/AssetCreate.js   |  32 ++-
 8 files changed, 760 insertions(+), 98 deletions(-)
----------------------------------------------------------------------
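
Each client class touched here (HadoopFS, HBaseClient, HiveClient, KnoxClient) converges on one contract: a static testConnection() that returns a HashMap<String, Object> carrying the five keys written by BaseClient.generateResponseDataMap(). A minimal standalone sketch of a caller reading that map — the class name and all values below are illustrative, not part of the commit:

import java.util.HashMap;

public class ResponseMapSketch {
    public static void main(String[] args) {
        // The five keys below are exactly those populated by
        // BaseClient.generateResponseDataMap(); the values are made up.
        HashMap<String, Object> responseData = new HashMap<String, Object>();
        responseData.put("connectivityStatus", Boolean.FALSE);
        responseData.put("message", "Unable to retrieve any files using given parameters.");
        responseData.put("description", "Unable to retrieve any files using given parameters."
                + " Check xa_portal.log for more info.");
        responseData.put("objectId", null);
        responseData.put("fieldName", null);

        // Callers branch on connectivityStatus and surface message/description.
        if (!Boolean.TRUE.equals(responseData.get("connectivityStatus"))) {
            System.err.println(responseData.get("description"));
        }
    }
}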


http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
index 7b6e8b7..4ae07fc 100644
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/HadoopFS.java
@@ -19,7 +19,9 @@
 
  package com.xasecure.hadoop.client;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.net.UnknownHostException;
 import java.security.PrivilegedAction;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -49,10 +51,12 @@ public class HadoopFS extends BaseClient {
 		super(dataSource,connectionProperties) ;
 	}
 	
-	
 	private List<String> listFilesInternal(String baseDir, String fileMatching) {
 		List<String> fileList = new ArrayList<String>() ;
 		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		try {
 			Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
 			String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/")) ;
@@ -80,12 +84,42 @@ public class HadoopFS extends BaseClient {
 						}
 					}
 				}
+			} catch (UnknownHostException uhe) {
+				String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
+						+ " of Hadoop environment [" + getDataSource() + "].";
+				HadoopException hdpException = new HadoopException(msgDesc, uhe);
+				hdpException.generateResponseDataMap(false, getMessage(uhe),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (FileNotFoundException fne) {
+				String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
+						+ "of Hadoop environment [" + getDataSource() + "].";
+				HadoopException hdpException = new HadoopException(msgDesc, fne);
+				hdpException.generateResponseDataMap(false, getMessage(fne),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
 			}
 			finally {
 			}
-		}
-		catch(IOException ioe) {
-			throw new HadoopException("Unable to get listing of files for directory [" + baseDir + "] from Hadoop environment [" + getDataSource() + "]", ioe) ;
+		} catch (IOException ioe) {
+			String msgDesc = "listFilesInternal: Unable to get listing of files for directory ["
+					+ baseDir
+					+ "] from Hadoop environment ["
+					+ getDataSource()
+					+ "].";
+			HadoopException hdpException = new HadoopException(msgDesc, ioe);
+			hdpException.generateResponseDataMap(false, getMessage(ioe),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+
+		} catch (IllegalArgumentException iae) {
+			String msgDesc = "listFilesInternal: Unable to get listing of files for directory ["
+					+ baseDir + "] from Hadoop environment [" + getDataSource()
+					+ "].";
+			HadoopException hdpException = new HadoopException(msgDesc, iae);
+			hdpException.generateResponseDataMap(false, getMessage(iae),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
 		}
 		finally {
 			Thread.currentThread().setContextClassLoader(prevCl);
@@ -129,4 +163,31 @@ public class HadoopFS extends BaseClient {
 		}
 	}
 
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		boolean connectivityStatus = false;
+		HadoopFS connectionObj = new HadoopFS(dataSource, connectionProperties);
+		if (connectionObj != null) {
+			List<String> testResult = connectionObj.listFiles("/", null);
+			if (testResult != null && testResult.size() != 0) {
+				connectivityStatus = true;
+			}
+		}
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any files using given parameters. "
+					+ "You can still save the repository and start creating policies, "
+					+ "but you would not be able to use autocomplete for resource names. "
+					+ "Check xa_portal.log for more info.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg,
+					null, null, responseData);
+		}
+		return responseData;
+	}
+
 }
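
A caller-side sketch of the new HadoopFS.testConnection() entry point; the repository name and the property keys in the map are illustrative (real keys come from the repository's saved config):

import java.util.HashMap;

import com.xasecure.hadoop.client.HadoopFS;

public class HdfsTestConnSketch {
    public static void main(String[] args) {
        HashMap<String, String> connectionProperties = new HashMap<String, String>();
        connectionProperties.put("username", "hdfs");                        // illustrative key
        connectionProperties.put("fs.default.name", "hdfs://namenode:8020"); // illustrative key

        HashMap<String, Object> result =
                HadoopFS.testConnection("dev-hdfs", connectionProperties);
        if (!Boolean.TRUE.equals(result.get("connectivityStatus"))) {
            // description already carries the "You can still save..." hint
            System.err.println(result.get("description"));
        }
    }
}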

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
index 44f343c..6d018df 100644
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/BaseClient.java
@@ -66,11 +66,20 @@ public abstract class BaseClient {
 	
 	protected void login() {
 		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		try {
 			Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
 			String userName = configHolder.getUserName() ;
 			if (userName == null) {
-				throw new HadoopException("Unable to find login username for hadoop environment, [" + dataSource + "]") ;
+				String msgDesc = "Unable to find login username for hadoop environment, ["
+						+ dataSource + "]";
+				HadoopException hdpException = new HadoopException(msgDesc);
+				hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
+						null, null);
+
+				throw hdpException;
 			}
 			String keyTabFile = configHolder.getKeyTabFile() ;
 			if (keyTabFile != null) {
@@ -94,11 +103,22 @@ public abstract class BaseClient {
 					loginSubject = SecureClientLogin.login(userName) ;
 				}
 			}
-		}
-		catch(IOException ioe) {
-			throw new HadoopException("Unable to login to Hadoop environment [" + dataSource + "]", ioe) ;
-		}
-		finally {
+		} catch (IOException ioe) {
+			String msgDesc = "Unable to login to Hadoop environment ["
+					+ dataSource + "]";
+
+			HadoopException hdpException = new HadoopException(msgDesc, ioe);
+			hdpException.generateResponseDataMap(false, getMessage(ioe),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+		} catch (SecurityException se) {
+			String msgDesc = "Unable to login to Hadoop environment ["
+					+ dataSource + "]";
+			HadoopException hdpException = new HadoopException(msgDesc, se);
+			hdpException.generateResponseDataMap(false, getMessage(se),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+		} finally {
 			Thread.currentThread().setContextClassLoader(prevCl);
 		}
 	}
@@ -115,6 +135,24 @@ public abstract class BaseClient {
 		return configHolder;
 	}
 	
-	
+	public static void generateResponseDataMap(boolean connectivityStatus,
+			String message, String description, Long objectId,
+			String fieldName, HashMap<String, Object> responseData) {
+		responseData.put("connectivityStatus", connectivityStatus);
+		responseData.put("message", message);
+		responseData.put("description", description);
+		responseData.put("objectId", objectId);
+		responseData.put("fieldName", fieldName);
+	}
+
+	public static String getMessage(Throwable excp) {
+		StringBuilder sb = new StringBuilder();
+		while (excp != null) {
+			sb.append(excp.getMessage()).append("\n");
+			excp = excp.getCause();
+		}
 
+		return sb.toString();
+	}
+	
 }
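
getMessage() flattens the whole cause chain into one newline-separated string, which is what lands in the "message" field of the response map. A self-contained illustration with the same logic (the exception text is invented for the example):

public class GetMessageDemo {
    // Same logic as BaseClient.getMessage() above.
    public static String getMessage(Throwable excp) {
        StringBuilder sb = new StringBuilder();
        while (excp != null) {
            sb.append(excp.getMessage()).append("\n");
            excp = excp.getCause();
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Exception root = new java.net.UnknownHostException("namenode");
        Exception wrapped = new RuntimeException(
                "Unable to login to Hadoop environment [dev-hdfs]", root);
        // Prints both messages, outermost first, one per line.
        System.out.print(getMessage(wrapped));
    }
}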

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
index 9707fdd..3cb838e 100644
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
+++ b/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
@@ -19,9 +19,13 @@
 
  package com.xasecure.hadoop.client.exceptions;
 
+import java.util.HashMap;
+
 public class HadoopException extends RuntimeException {
 
 	private static final long serialVersionUID = 8872734935128535649L;
+	
+	public HashMap<String, Object> responseData;
 
 	public HadoopException() {
 		super();
@@ -43,5 +47,14 @@ public class HadoopException extends RuntimeException {
 		// TODO Auto-generated constructor stub
 	}
 
+	public void generateResponseDataMap(boolean connectivityStatus,
+			String message, String description, Long objectId, String fieldName) {
+		responseData = new HashMap<String, Object>();
+		responseData.put("connectivityStatus", connectivityStatus);
+		responseData.put("message", message);
+		responseData.put("description", description);
+		responseData.put("objectId", objectId);
+		responseData.put("fieldName", fieldName);
+	}
 
 }
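
Since responseData is a public field, callers unwrap it directly from the caught exception — the same move AssetMgr.testConfig() makes later in this commit. A minimal sketch (message text illustrative):

import java.util.HashMap;

import com.xasecure.hadoop.client.exceptions.HadoopException;

public class HadoopExceptionSketch {
    public static void main(String[] args) {
        try {
            HadoopException he = new HadoopException("Unable to connect.");
            he.generateResponseDataMap(false, "Unable to connect.",
                    "Unable to connect. Check xa_portal.log for more info.",
                    null, null);
            throw he;
        } catch (HadoopException e) {
            // responseData may be null if the thrower never populated it.
            HashMap<String, Object> respData = e.responseData;
            if (respData != null) {
                System.out.println(respData.get("message"));
            }
        }
    }
}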

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
index dcb80d7..292e53c 100644
--- a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
+++ b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
@@ -35,10 +35,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.security.SecureClientLogin;
 
+import com.google.protobuf.ServiceException;
 import com.xasecure.hadoop.client.config.BaseClient;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
 
 public class HBaseClient extends BaseClient {
 
@@ -75,9 +78,39 @@ public class HBaseClient extends BaseClient {
 		return connectionProp;
 	}
 	
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		boolean connectivityStatus = false;
+
+		HBaseClient connectionObj = new HBaseClient(dataSource,
+				connectionProperties);
+		if (connectionObj != null) {
+			connectivityStatus = connectionObj.getHBaseStatus();
+		}
+
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to connect to HBase using given parameters.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg
+					+ errMsg, null, null, responseData);
+		}
+		return responseData;
+	}
+	
 	public boolean getHBaseStatus() {
 		boolean hbaseStatus = false;
 		subj = getLoginSubject();
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if (subj != null) {
 			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
 			try {
@@ -96,13 +129,61 @@ public class HBaseClient extends BaseClient {
 							HBaseAdmin.checkHBaseAvailable(conf);					
 						    LOG.info("getHBaseStatus: no exception: HbaseAvailability true");
 							hbaseStatus1 = true;
-						} catch (Throwable e) {
-							LOG.error("getHBaseStatus: Unable to check availability of Hbase environment [" + getConfigHolder().getDatasourceName() + "]", e);
+						} catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getHBaseStatus: Looks like the HBase Master is not running, "
+									+ "so HBase availability could not be checked. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						} catch (ServiceException se) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of "
+									+ "HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, se);
+							hdpException.generateResponseDataMap(false, getMessage(se),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch(IOException io) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of"
+									+ " HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						}  catch (Throwable e) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of"
+									+ " HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							LOG.error(msgDesc);
 							hbaseStatus1 = false;
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
 						}
 						return hbaseStatus1;
 					}
 				}) ;
+			} catch (SecurityException se) {
+				String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, "
+						+ "current thread might not be able to set the context ClassLoader.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
 			} finally {
 				Thread.currentThread().setContextClassLoader(prevCl);
 			}
@@ -127,6 +208,9 @@ public class HBaseClient extends BaseClient {
 
 	public List<String> getTableList(final String tableNameMatching) {
 		List<String> ret = null ;
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		
 		if (subj != null) {
 			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
@@ -147,9 +231,42 @@ public class HBaseClient extends BaseClient {
 							for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
 								tableList.add(htd.getNameAsString()) ;
 							}
-						}
-						catch(Throwable t) {
-							LOG.error("Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table-match:" + tableNameMatching + "]", t);
+						} catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getTableList: Looks like the HBase Master is not running, "
+									+ "so HBase availability could not be checked. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						}  catch(IOException io) {
+							String msgDesc = "Unable to get HBase table List for [repository:"
+									+ getConfigHolder().getDatasourceName() + ",table-match:" 
+									+ tableNameMatching + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+						}   catch (Throwable e) {
+							String msgDesc = "Unable to get HBase table List for [repository:"
+									+ getConfigHolder().getDatasourceName() + ",table-match:" 
+									+ tableNameMatching + "].";
+							LOG.error(msgDesc);
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
 						}
 						finally {
 							if (admin != null) {
@@ -174,7 +291,10 @@ public class HBaseClient extends BaseClient {
 	
 	
 	public List<String> getColumnFamilyList(final String tableName, final String columnFamilyMatching) {
-		List<String> ret = null ;		
+		List<String> ret = null ;
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if (subj != null) {
 			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
 			try {
@@ -201,9 +321,54 @@ public class HBaseClient extends BaseClient {
 									}
 								}
 							}
-						}
-						catch(Throwable t) {
-							LOG.error("Unable to get HBase table List for [repository:" + getConfigHolder().getDatasourceName() + ",table:" + tableName + ", table-match:" + columnFamilyMatching + "]", t);
+						}  catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getColumnFamilyList: Looks like the HBase Master is not running, "
+									+ "so HBase availability could not be checked. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						}  catch(IOException io) {
+							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
+									+ ", table-match:" + columnFamilyMatching + "], "
+									+ "current thread might not be able to set the context ClassLoader.";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException; 
+						} catch (SecurityException se) {
+								String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+										+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
+										+ ", table-match:" + columnFamilyMatching + "], "
+										+ "current thread might not be able to set the context ClassLoader.";
+								HadoopException hdpException = new HadoopException(msgDesc, se);
+								hdpException.generateResponseDataMap(false, getMessage(se),
+										msgDesc + errMsg, null, null);
+								throw hdpException;							
+							
+						}  catch (Throwable e) {
+							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
+									+ ", table-match:" + columnFamilyMatching + "], "
+									+ "current thread might not be able to set the context ClassLoader.";
+							LOG.error(msgDesc);
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
 						}
 						finally {
 							if (admin != null) {
@@ -218,8 +383,14 @@ public class HBaseClient extends BaseClient {
 					}
 					
 				}) ;
-			}
-			finally {
+			} catch (SecurityException se) {
+				String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, "
+						+ "current thread might not be able to set the context ClassLoader.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
 				Thread.currentThread().setContextClassLoader(prevCl);
 			}
 		}
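
The HBase checks above all run inside Subject.doAs() so they execute under the login subject; because HadoopException is unchecked, the ones thrown from the catch blocks escape the PrivilegedAction and reach testConnection() with their responseData intact. A reduced sketch of that control flow (the availability call is stubbed out):

import java.security.PrivilegedAction;

import javax.security.auth.Subject;

public class DoAsSketch {
    public static boolean checkAvailability(Subject loginSubject) {
        return Subject.doAs(loginSubject, new PrivilegedAction<Boolean>() {
            public Boolean run() {
                // Stand-in for HBaseAdmin.checkHBaseAvailable(conf); a
                // RuntimeException thrown here propagates out of doAs().
                return Boolean.TRUE;
            }
        });
    }

    public static void main(String[] args) {
        System.out.println(checkAvailability(new Subject()));
    }
}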

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
index 8b697ad..bfbf0f9 100644
--- a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
+++ b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
@@ -26,6 +26,7 @@ import java.sql.Driver;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.sql.SQLTimeoutException;
 import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -53,7 +54,7 @@ public class HiveClient extends BaseClient implements Closeable {
 		super(dataSource) ;
 		initHive() ;
 	}
-
+	
 	public HiveClient(String dataSource,HashMap<String,String> connectionProp) {
 		super(dataSource,connectionProp) ;
 		initHive() ;
@@ -81,6 +82,9 @@ public class HiveClient extends BaseClient implements Closeable {
 	
 	public List<String> getDatabaseList(String databaseMatching) {
 		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if (con != null) {
 			Statement stat =  null ;
 			ResultSet rs = null ;
@@ -94,11 +98,22 @@ public class HiveClient extends BaseClient implements Closeable {
 				while (rs.next()) {
 					ret.add(rs.getString(1)) ;
 				}
- 			}
-			catch(SQLException sqle) {
-				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
-			}
-			finally {
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out: unable to execute SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
 				close(rs) ;
 				close(stat) ;
 			}
@@ -109,6 +124,9 @@ public class HiveClient extends BaseClient implements Closeable {
 
 	public List<String> getTableList(String database, String tableNameMatching) {
 		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if (con != null) {
 			Statement stat =  null ;
 			ResultSet rs = null ;
@@ -135,11 +153,22 @@ public class HiveClient extends BaseClient implements Closeable {
 				while (rs.next()) {
 					ret.add(rs.getString(1)) ;
 				}
- 			}
-			catch(SQLException sqle) {
-				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
-			}
-			finally {
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out: unable to execute SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
 				close(rs) ;
 				close(stat) ;
 			}
@@ -160,6 +189,9 @@ public class HiveClient extends BaseClient implements Closeable {
 
 	public List<String> getColumnList(String database, String tableName, String columnNameMatching) {
 		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if (con != null) {
 			
 			String columnNameMatchingRegEx = null ;
@@ -196,11 +228,22 @@ public class HiveClient extends BaseClient implements Closeable {
 						ret.add(columnName) ;
 					}
 				}
- 			}
-			catch(SQLException sqle) {
-				throw new HadoopException("Unable to execute SQL [" + sql + "]", sqle);
-			}
-			finally {
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out: unable to execute SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
 				close(rs) ;
 				close(stat) ;
 			}
@@ -254,30 +297,85 @@ public class HiveClient extends BaseClient implements Closeable {
 		Properties prop = getConfigHolder().getXASecureSection() ;
 		String driverClassName = prop.getProperty("jdbc.driverClassName") ;
 		String url =  prop.getProperty("jdbc.url") ;	
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 	
 		if (driverClassName != null) {
 			try {
 				Driver driver = (Driver)Class.forName(driverClassName).newInstance() ;
 				DriverManager.registerDriver(driver);
+			} catch (SQLException e) {
+				String msgDesc = "initConnection: Caught SQLException while registering "
+						+ "Hive driver, so unable to connect to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, e);
+				hdpException.generateResponseDataMap(false, getMessage(e),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (IllegalAccessException ilae) {
+				String msgDesc = "initConnection: Class or its nullary constructor might not be accessible, "
+						+ "so unable to initiate connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, ilae);
+				hdpException.generateResponseDataMap(false, getMessage(ilae),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (InstantiationException ie) {
+				String msgDesc = "initConnection: Class may not have a nullary constructor, or "
+						+ "the instantiation failed for some other reason, "
+						+ "so unable to initiate connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, ie);
+				hdpException.generateResponseDataMap(false, getMessage(ie),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+				
+			} catch (ExceptionInInitializerError eie) {
+				String msgDesc = "initConnection: Got ExceptionInInitializerError; "
+						+ "the initialization provoked by this method failed, "
+						+ "so unable to initiate connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, eie);
+				hdpException.generateResponseDataMap(false, getMessage(eie),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SecurityException se) {
+				String msgDesc = "initConnection: Unable to initiate connection to the Hive Thrift Server instance. "
+						+ "The caller's class loader is not the same as, or an ancestor "
+						+ "of, the class loader for the current class, and invocation of "
+						+ "s.checkPackageAccess() denies access to the package of this class.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
 			} catch (Throwable t) {
-				throw new HadoopException("Unable to connect to Hive Thrift Server instance", t) ;
+				String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
+						+ "please provide a valid value for the field {jdbc.driverClassName}.";
+				HadoopException hdpException = new HadoopException(msgDesc, t);
+				hdpException.generateResponseDataMap(false, getMessage(t),
+						msgDesc + errMsg, null, "jdbc.driverClassName");
+				throw hdpException;
 			}
 		}
 		
-	
 		try {
 			
 			if (userName == null && password == null) {
 				con = DriverManager.getConnection(url) ;
 			}
-			else {
-				
+			else {			
 				con = DriverManager.getConnection(url, userName, password) ;
-			
 			}
 		
 		} catch (SQLException e) {
-			throw new HadoopException("Unable to connect to Hive Thrift Server instance", e) ;
+			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+			HadoopException hdpException = new HadoopException(msgDesc, e);
+			hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
+					+ errMsg, null, null);
+			throw hdpException;
+		} catch (SecurityException se) {
+			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+			HadoopException hdpException = new HadoopException(msgDesc, se);
+			hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
+					+ errMsg, null, null);
+			throw hdpException;
 		}
 	}
 
@@ -335,5 +433,37 @@ public class HiveClient extends BaseClient implements Closeable {
 				hc.close();
 			}
 		}	
-	}	
+	}
+
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		boolean connectivityStatus = false;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+
+		HiveClient connectionObj = new HiveClient(dataSource,
+				connectionProperties);
+		if (connectionObj != null) {
+			List<String> testResult = connectionObj.getDatabaseList("*");
+			if (testResult != null && testResult.size() != 0) {
+				connectivityStatus = true;
+			}
+		}
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any databases using given parameters.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+					null, null, responseData);
+		}
+		
+		connectionObj.close();
+		return responseData;
+	}
+	
 }
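
initConnection() loads the driver reflectively from the jdbc.driverClassName property, which is why so many exception types get their own catch block above. A condensed standalone version of that sequence; the driver class and URL are example values for HiveServer2, not taken from the commit:

import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;

public class HiveConnectSketch {
    public static Connection open(String driverClassName, String url,
            String userName, String password) throws Exception {
        // Class.forName(...).newInstance() can fail with ClassNotFoundException,
        // InstantiationException, IllegalAccessException or
        // ExceptionInInitializerError -- the cases handled individually above.
        Driver driver = (Driver) Class.forName(driverClassName).newInstance();
        DriverManager.registerDriver(driver);
        return (userName == null && password == null)
                ? DriverManager.getConnection(url)
                : DriverManager.getConnection(url, userName, password);
    }

    public static void main(String[] args) throws Exception {
        Connection con = open("org.apache.hive.jdbc.HiveDriver",   // example values
                "jdbc:hive2://hiveserver:10000/default", "hive", "hive");
        con.close();
    }
}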

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java b/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
index 4d35914..d4aa734 100644
--- a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
+++ b/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
@@ -19,8 +19,12 @@
 package com.xasecure.knox.client;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,6 +35,8 @@ import com.sun.jersey.api.client.Client;
 import com.sun.jersey.api.client.ClientResponse;
 import com.sun.jersey.api.client.WebResource;
 import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
+import com.xasecure.hadoop.client.config.BaseClient;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
 
 public class KnoxClient {
 
@@ -61,6 +67,9 @@ public class KnoxClient {
 		LOG.debug("Getting Knox topology list for topologyNameMatching : " +
 				topologyNameMatching);
 		List<String> topologyList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if ( topologyNameMatching == null ||  topologyNameMatching.trim().isEmpty()) {
 			topologyNameMatching = "";
 		}
@@ -104,8 +113,16 @@ public class KnoxClient {
 					}
 
 				} else {
-					LOG.error("Unable to get a valid response for isFileChanged()  call for ["
-							+ knoxUrl + "] - got null response.");
+					// LOG.error("Unable to get a valid response for isFileChanged()  call for ["
+					// + knoxUrl + "] - got null response.");
+					String msgDesc = "Unable to get a valid response for "
+							+ "getTopologyList() call for KnoxUrl : [" + knoxUrl
+							+ "] - got null response.";
+					LOG.error(msgDesc);
+					HadoopException hdpException = new HadoopException(msgDesc);
+					hdpException.generateResponseDataMap(false, msgDesc,
+							msgDesc + errMsg, null, null);
+					throw hdpException;
 				}
 
 			} finally {
@@ -117,8 +134,18 @@ public class KnoxClient {
 				}
 			}
 		} catch (Throwable t) {
-			LOG.error("Exception on REST call to: " + knoxUrl, t);
-			t.printStackTrace();
+			// LOG.error("Exception on REST call to: " + knoxUrl, t);
+			// t.printStackTrace();
+
+			String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
+			LOG.error(msgDesc, t);
+
+			if (t instanceof HadoopException) {
+				throw (HadoopException) t;
+			}
+			HadoopException hdpException = new HadoopException(msgDesc, t);
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(t), msgDesc + errMsg, null, null);
+			throw hdpException;
 		} finally {
 		}
 		return topologyList;
@@ -130,6 +157,9 @@ public class KnoxClient {
 		// sample URI: .../admin/api/v1/topologies/<topologyName>
 		
 		List<String> serviceList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
 		if ( serviceNameMatching == null ||  serviceNameMatching.trim().isEmpty()) {
 			serviceNameMatching = "";
 		}
@@ -173,8 +203,16 @@ public class KnoxClient {
 					}
 
 				} else {
-					LOG.error("Unable to get a valid response for isFileChanged()  call for ["
-							+ knoxUrl + "] - got null response.");
+					// LOG.error("Unable to get a valid response for isFileChanged()  call for ["
+					// + knoxUrl + "] - got null response.");
+					String msgDesc = "Unable to get a valid response for "
+							+ "getServiceList() call for KnoxUrl : [" + knoxUrl
+							+ "] - got null response.";
+					LOG.error(msgDesc);
+					HadoopException hdpException = new HadoopException(msgDesc);
+					hdpException.generateResponseDataMap(false, msgDesc,
+							msgDesc + errMsg, null, null);
+					throw hdpException;
 				}
 
 			} finally {
@@ -186,8 +224,19 @@ public class KnoxClient {
 				}
 			}
 		} catch (Throwable t) {
-			LOG.error("Exception on REST call to: " + knoxUrl, t);
-			t.printStackTrace();
+			// LOG.error("Exception on REST call to: " + knoxUrl, t);
+			// t.printStackTrace();
+
+			String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
+			LOG.error(msgDesc, t);
+
+			if (t instanceof HadoopException) {
+				throw (HadoopException) t;
+			}
+			HadoopException hdpException = new HadoopException(msgDesc, t);
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(t), msgDesc + errMsg, null, null);
+			throw hdpException;
+
 		} finally {
 		}
 		return serviceList;
@@ -227,4 +276,125 @@ public class KnoxClient {
 		}
 	}
 	
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		List<String> strList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		boolean connectivityStatus = false;
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+
+		KnoxClient knoxClient = getKnoxClient(dataSource, connectionProperties);
+		strList = getKnoxResources(knoxClient, "", null);
+
+		if (strList != null && (strList.size() != 0)) {
+			connectivityStatus = true;
+		}
+		
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any topologies/services using given parameters.";
+			BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+					null, null, responseData);
+		}
+		
+		return responseData;
+	}
+
+	public static KnoxClient getKnoxClient(String dataSourceName,
+			Map<String, String> configMap) {
+		KnoxClient knoxClient = null;
+		LOG.debug("Getting knoxClient for datasource: " + dataSourceName
+				+ ", configMap: " + configMap);
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (configMap == null || configMap.isEmpty()) {
+			String msgDesc = "Could not connect because the connection ConfigMap is empty.";
+			LOG.error(msgDesc);
+			HadoopException hdpException = new HadoopException(msgDesc);
+			hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null,
+					null);
+			throw hdpException;
+		} else {
+			String knoxUrl = configMap.get("knox.url");
+			String knoxAdminUser = configMap.get("username");
+			String knoxAdminPassword = configMap.get("password");
+			knoxClient = new KnoxClient(knoxUrl, knoxAdminUser,
+					knoxAdminPassword);
+		}
+		return knoxClient;
+	}
+
+	public static List<String> getKnoxResources(final KnoxClient knoxClient,
+			String topologyName, String serviceName) {
+
+		List<String> resultList = new ArrayList<String>();
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+
+		try {
+			if (knoxClient == null) {
+				// LOG.error("Unable to get knox resources: knoxClient is null");
+				// return new ArrayList<String>();
+				String msgDesc = "Unable to get knox resources: knoxClient is null.";
+				LOG.error(msgDesc);
+				HadoopException hdpException = new HadoopException(msgDesc);
+				hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
+						null, null);
+				throw hdpException;
+			}
+
+			final Callable<List<String>> callableObj;
+			if (serviceName != null) {
+				final String finalServiceNameMatching = serviceName.trim();
+				final String finalTopologyName = topologyName;
+				callableObj = new Callable<List<String>>() {
+					@Override
+					public List<String> call() {
+						return knoxClient.getServiceList(finalTopologyName,
+								finalServiceNameMatching);
+					}
+				};
+
+			} else {
+				final String finalTopologyNameMatching = (topologyName == null) ? ""
+						: topologyName.trim();
+				callableObj = new Callable<List<String>>() {
+					@Override
+					public List<String> call() {
+						return knoxClient
+								.getTopologyList(finalTopologyNameMatching);
+					}
+				};
+			}
+			resultList = timedTask(callableObj, 5, TimeUnit.SECONDS);
+
+		} catch (Exception e) {
+			String msgDesc = "Unable to get knox resources.";
+			LOG.error(msgDesc, e);
+			if (e instanceof HadoopException) {
+				throw (HadoopException) e;
+			}
+			HadoopException hdpException = new HadoopException(msgDesc, e);
+			hdpException.generateResponseDataMap(false,
+					BaseClient.getMessage(e), msgDesc + errMsg, null, null);
+
+			throw hdpException;
+		}
+
+		return resultList;
+	}
+
+	public static <T> T timedTask(Callable<T> callableObj, long timeout,
+			TimeUnit timeUnit) throws Exception {
+		return callableObj.call();
+	}
+
 }
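
Note that timedTask() above is a stub: it runs the callable inline and ignores both timeout arguments, so the 5-second bound requested in getKnoxResources() is not actually enforced. If a real bound were wanted, one sketch (not part of this commit) would hand the callable to a single-thread executor:

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class TimedTaskSketch {
    public static <T> T timedTask(Callable<T> callableObj, long timeout,
            TimeUnit timeUnit) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        try {
            Future<T> future = executor.submit(callableObj);
            return future.get(timeout, timeUnit); // throws TimeoutException on expiry
        } finally {
            executor.shutdownNow(); // interrupt the worker if it is still running
        }
    }
}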

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java b/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
index 55fc3e3..cbf7017 100644
--- a/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
+++ b/security-admin/src/main/java/com/xasecure/biz/AssetMgr.java
@@ -21,6 +21,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.net.UnknownHostException;
 import java.security.cert.X509Certificate;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -78,6 +79,7 @@ import com.xasecure.entity.XXResource;
 import com.xasecure.entity.XXTrxLog;
 import com.xasecure.entity.XXUser;
 import com.xasecure.hadoop.client.HadoopFS;
+import com.xasecure.hadoop.client.exceptions.HadoopException;
 import com.xasecure.hbase.client.HBaseClient;
 import com.xasecure.hive.client.HiveClient;
 import com.xasecure.knox.client.KnoxClient;
@@ -95,6 +97,7 @@ import com.xasecure.view.VXAsset;
 import com.xasecure.view.VXAuditMap;
 import com.xasecure.view.VXAuditMapList;
 import com.xasecure.view.VXLong;
+import com.xasecure.view.VXMessage;
 import com.xasecure.view.VXPermMap;
 import com.xasecure.view.VXPermMapList;
 import com.xasecure.view.VXPolicy;
@@ -1569,8 +1572,10 @@ public class AssetMgr extends AssetMgrBase {
 		}
 		
 		int assetType = vXAsset.getAssetType();
+		
 		VXResponse testResponse = new VXResponse();
-		boolean connectivityStatus = false;
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		
 		HashMap<String, String> configMap = (HashMap<String, String>) jsonUtil
 				.jsonToMap(vXAsset.getConfig());
 		String password = configMap.get("password");
@@ -1597,60 +1602,106 @@ public class AssetMgr extends AssetMgrBase {
 		}
 
 		try {
+			String dataSource = vXAsset.getName();
 			if (assetType == AppConstants.ASSET_HDFS) {
-				HadoopFS connectionObj = new HadoopFS(vXAsset.getName(),
-						configMap);
-				if (connectionObj != null) {
-					List<String> testResult = connectionObj
-							.listFiles("/", null);
-					if (testResult != null && testResult.size() != 0) {
-						connectivityStatus = true;
-					}
-				}
+				// HadoopFS connectionObj = new HadoopFS(vXAsset.getName(),
+				// configMap);
+				// if (connectionObj != null) {
+				// List<String> testResult = connectionObj
+				// .listFiles("/", null);
+				// if (testResult != null && testResult.size() != 0) {
+				// connectivityStatus = true;
+				// }
+				// }
+				responseData = HadoopFS.testConnection(dataSource, configMap);
 			} else if (assetType == AppConstants.ASSET_HIVE) {
-				HiveClient connectionObj = new HiveClient(vXAsset.getName(),
-						configMap);
-				if (connectionObj != null) {
-					List<String> testResult = connectionObj
-							.getDatabaseList("*");
-					if (testResult != null && testResult.size() != 0) {
-						connectivityStatus = true;
-					}
-				}
-				connectionObj.close();
+				// HiveClient connectionObj = new HiveClient(vXAsset.getName(),
+				// configMap);
+				// if (connectionObj != null) {
+				// List<String> testResult = connectionObj
+				// .getDatabaseList("*");
+				// if (testResult != null && testResult.size() != 0) {
+				// connectivityStatus = true;
+				// }
+				// }
+				// connectionObj.close();
+				responseData = HiveClient.testConnection(dataSource, configMap);
 			} else if (assetType == AppConstants.ASSET_HBASE) {
-				HBaseClient connectionObj = new HBaseClient(vXAsset.getName(),
-						configMap);
-				if (connectionObj != null) {
-					connectivityStatus = connectionObj.getHBaseStatus();
-				} else {
-					Log.error("testConfig: Not able to create HBaseClient");
-				}
-			} else if (assetType == AppConstants.ASSET_KNOX) { 
-				KnoxClient knoxClient = assetConnectionMgr.getKnoxClient(
-						vXAsset.getName(), configMap);
-				VXStringList vxStringList = getKnoxResources(knoxClient, "", null);
-				if (vxStringList != null && (vxStringList.getListSize() != 0)) {
-					connectivityStatus = true;
-				}
+				// HBaseClient connectionObj = new
+				// HBaseClient(vXAsset.getName(),
+				// configMap);
+				// if (connectionObj != null) {
+				// connectivityStatus = connectionObj.getHBaseStatus();
+				// } else {
+				// Log.error("testConfig: Not able to create HBaseClient");
+				// }
+				responseData = HBaseClient
+						.testConnection(dataSource, configMap);
+			} else if (assetType == AppConstants.ASSET_KNOX) {
+				// KnoxClient knoxClient = assetConnectionMgr.getKnoxClient(
+				// vXAsset.getName(), configMap);
+				// VXStringList vxStringList = getKnoxResources(knoxClient, "",
+				// null);
+				// if (vxStringList != null && (vxStringList.getListSize() !=
+				// 0)) {
+				// connectivityStatus = true;
+				// }
+				responseData = KnoxClient.testConnection(dataSource, configMap);
 			} else {
 				throw restErrorUtil.createRESTException(
 						"Invalid repository type.",
 						MessageEnums.INVALID_INPUT_DATA);
 			}
-			if (connectivityStatus) {
-				testResponse.setStatusCode(VXResponse.STATUS_SUCCESS);
-			} else {
-				testResponse.setStatusCode(VXResponse.STATUS_ERROR);
-			}
+			testResponse = generateResponseForTestConn(responseData, "");
+
 		} catch (Exception e) {
-			testResponse.setStatusCode(VXResponse.STATUS_ERROR);
-			logger.error("Unable to connect repository with given config for "
-					+ vXAsset.getName(), e);
+
+			String msg = "Unable to connect repository with given config for "
+					+ vXAsset.getName();
+			HashMap<String, Object> respData = new HashMap<String, Object>();
+			String message = "";
+			if (e instanceof HadoopException) {
+				respData = ((HadoopException) e).responseData;
+				message = (respData != null && respData.get("message") != null) ? respData.get(
+						"message").toString() : msg;
+			}
+			testResponse = generateResponseForTestConn(respData, message);
+			logger.error(msg, e);
 		}
 		return testResponse;
 	}
 
+	private VXResponse generateResponseForTestConn(
+			HashMap<String, Object> responseData, String msg) {
+		VXResponse vXResponse = new VXResponse();
+
+		Long objId = (responseData.get("objectId") != null) ? Long
+				.parseLong(responseData.get("objectId").toString()) : null;
+		boolean connectivityStatus = (responseData.get("connectivityStatus") != null) ? Boolean
+				.parseBoolean(responseData.get("connectivityStatus").toString())
+				: false;
+		int statusCode = (connectivityStatus) ? VXResponse.STATUS_SUCCESS
+				: VXResponse.STATUS_ERROR;
+		String message = (responseData.get("message") != null) ? responseData
+				.get("message").toString() : msg;
+		String description = (responseData.get("description") != null) ? responseData
+				.get("description").toString() : msg;
+		String fieldName = (responseData.get("fieldName") != null) ? responseData
+				.get("fieldName").toString() : null;
+
+		VXMessage vXMsg = new VXMessage();
+		List<VXMessage> vXMsgList = new ArrayList<VXMessage>();
+		vXMsg.setFieldName(fieldName);
+		vXMsg.setMessage(message);
+		vXMsg.setObjectId(objId);
+		vXMsgList.add(vXMsg);
+
+		vXResponse.setMessageList(vXMsgList);
+		vXResponse.setMsgDesc(description);
+		vXResponse.setStatusCode(statusCode);
+		return vXResponse;
+	}
+
 	private void createResourcePathForHive(VXResource vXResource) {
 
 		String[] databases = (vXResource.getDatabases() == null || vXResource
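
The VXResponse built by generateResponseForTestConn() is exactly what AssetCreate.js unpacks below: statusCode picks success vs. failure, messageList[0].message becomes the bold summary, and msgDesc feeds the "Show More.." panel. A sketch of a failure response using the view classes seen in this diff (the helper name and texts are illustrative):

import java.util.ArrayList;
import java.util.List;

import com.xasecure.view.VXMessage;
import com.xasecure.view.VXResponse;

public class TestConnResponseSketch {
    public static VXResponse failure(String message, String description) {
        VXMessage vXMsg = new VXMessage();
        vXMsg.setMessage(message);            // bold summary in the dialog
        List<VXMessage> vXMsgList = new ArrayList<VXMessage>();
        vXMsgList.add(vXMsg);

        VXResponse vXResponse = new VXResponse();
        vXResponse.setStatusCode(VXResponse.STATUS_ERROR);
        vXResponse.setMessageList(vXMsgList);
        vXResponse.setMsgDesc(description);   // revealed by "Show More.."
        return vXResponse;
    }
}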

http://git-wip-us.apache.org/repos/asf/incubator-argus/blob/d95b5193/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js b/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
index fb4c349..9f0c5a9 100644
--- a/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
+++ b/security-admin/src/main/webapp/scripts/views/asset/AssetCreate.js
@@ -225,8 +225,36 @@ define(function(require){
 			this.model.testConfig(this.model,{
 					//wait: true,
 					success: function (msResponse, options) {
-						if(msResponse.statusCode)
-							bootbox.alert("Connection Problem.");
+						if(msResponse.statusCode){
+							if(!_.isUndefined(msResponse) && _.isArray(msResponse.messageList) 
+														  && !_.isUndefined(msResponse.messageList[0].message)){
+								if(!_.isEmpty(msResponse.messageList[0].message) && msResponse.messageList[0].message != "\n"){
+									bootbox.dialog('<b>'+msResponse.messageList[0].message+'</b>',	[{
+										label: "Show More..",
+										callback:function(e){
+											console.log(e)
+											if($(e.currentTarget).text() == 'Show More..'){
+												var div = '<div class="showMore">'+msResponse.msgDesc+'</div>';
+												$(e.delegateTarget).find('.modal-body').append(div)
+												$(e.currentTarget).html('Show Less..')
+											}else{
+												$(e.delegateTarget).find('.showMore').remove();
+												$(e.currentTarget).html('Show More..')
+											}
+											return false;
+										}
+									}, {
+										label: "OK",
+										callback:function(){}
+									}]
+									);
+								}else{
+									bootbox.alert(msResponse.msgDesc);	
+								}
+							}else{
+								bootbox.alert("Connection Problem.");
+							}
+						}
 						else
 							bootbox.alert("Connected Successfully.");
 					},