Posted to commits@ranger.apache.org by ma...@apache.org on 2014/12/12 02:30:12 UTC

[29/51] [partial] incubator-ranger git commit: RANGER-194: Rename packages from xasecure to apache ranger
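
The rename is purely structural, so downstream code tracks it as an import change. A minimal before/after sketch (HadoopFS is one of the classes added below under its new package; the matching deletion of the old com.xasecure class may sit in another part of this 51-part commit, so the old path is assumed):

    // before the rename (assumed old location)
    // import com.xasecure.hadoop.client.HadoopFS;

    // after the rename (new location, added in this commit)
    import org.apache.ranger.hadoop.client.HadoopFS;

    HadoopFS fs = new HadoopFS("dev-hdfs");   // repository name is illustrative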

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/storm/client/json/model/Topology.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/storm/client/json/model/Topology.java b/lookup-client/src/main/java/com/xasecure/storm/client/json/model/Topology.java
deleted file mode 100644
index 229eb54..0000000
--- a/lookup-client/src/main/java/com/xasecure/storm/client/json/model/Topology.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.storm.client.json.model;
-
-public class Topology {
-	private String id ;
-	private String name ;
-	private String status ;
-	
-	public String getId() {
-		return id;
-	}
-	public void setId(String id) {
-		this.id = id;
-	}
-	public String getName() {
-		return name;
-	}
-	public void setName(String name) {
-		this.name = name;
-	}
-	public String getStatus() {
-		return status;
-	}
-	public void setStatus(String status) {
-		this.status = status;
-	}
-	
-	
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/storm/client/json/model/TopologyListResponse.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/storm/client/json/model/TopologyListResponse.java b/lookup-client/src/main/java/com/xasecure/storm/client/json/model/TopologyListResponse.java
deleted file mode 100644
index 486f3d2..0000000
--- a/lookup-client/src/main/java/com/xasecure/storm/client/json/model/TopologyListResponse.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.storm.client.json.model;
-
-import java.util.List;
-
-import com.google.gson.annotations.SerializedName;
-
-public class TopologyListResponse {
-	@SerializedName("topologies")
-	private List<Topology>	topologyList;
-
-	public List<Topology> getTopologyList() {
-		return topologyList;
-	}
-
-	public void setTopologyList(List<Topology> topologyList) {
-		this.topologyList = topologyList;
-	}
-	
-}
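
The two deleted files above are plain Gson model classes for the Storm REST topology listing. For reference, such a model is consumed roughly as follows (a sketch; the JSON shape is inferred from the @SerializedName("topologies") annotation):

    import com.google.gson.Gson;

    String json = "{\"topologies\":[{\"id\":\"t-1\",\"name\":\"wordcount\",\"status\":\"ACTIVE\"}]}";
    TopologyListResponse resp = new Gson().fromJson(json, TopologyListResponse.class);
    for (Topology t : resp.getTopologyList()) {
        System.out.println(t.getName() + " [" + t.getStatus() + "]");
    }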

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
new file mode 100644
index 0000000..10a83c3
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.UnknownHostException;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.ranger.hadoop.client.config.BaseClient;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
+public class HadoopFS extends BaseClient {
+
+	private static final Log LOG = LogFactory.getLog(HadoopFS.class) ;
+
+	public HadoopFS(String dataSource) {
+		super(dataSource) ;
+	}
+	
+	public HadoopFS(String dataSource, HashMap<String,String> connectionProperties) {
+		super(dataSource,connectionProperties) ;
+	}
+	
+	private List<String> listFilesInternal(String baseDir, String fileMatching) {
+		List<String> fileList = new ArrayList<String>() ;
+		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		try {
+			Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+			String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/")) ;
+			String filterRegEx = null;
+			if (fileMatching != null && fileMatching.trim().length() > 0) {
+				filterRegEx = fileMatching.trim() ;
+			}
+			
+			Configuration conf = new Configuration() ;
+			UserGroupInformation.setConfiguration(conf);
+			
+			FileSystem fs = null ;
+			try {
+				fs = FileSystem.get(conf) ;
+				
+				FileStatus[] fileStats = fs.listStatus(new Path(baseDir)) ;
+				if (fileStats != null) {
+					for(FileStatus stat : fileStats) {
+						Path path = stat.getPath() ;
+						String pathComponent = path.getName() ;
+						if (filterRegEx == null) {
+							fileList.add(dirPrefix + pathComponent) ;
+						}
+						else if (FilenameUtils.wildcardMatch(pathComponent, filterRegEx)) {
+							fileList.add(dirPrefix + pathComponent) ;
+						}
+					}
+				}
+			} catch (UnknownHostException uhe) {
+				String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
+						+ " of Hadoop environment [" + getDataSource() + "].";
+				HadoopException hdpException = new HadoopException(msgDesc, uhe);
+				hdpException.generateResponseDataMap(false, getMessage(uhe),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (FileNotFoundException fne) {
+				String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
+						+ "of Hadoop environment [" + getDataSource() + "].";
+				HadoopException hdpException = new HadoopException(msgDesc, fne);
+				hdpException.generateResponseDataMap(false, getMessage(fne),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			}
+			finally {
+				// Note: the FileSystem handle is intentionally not closed here;
+				// FileSystem.get() returns a cached instance shared across callers.
+			}
+		} catch (IOException ioe) {
+			String msgDesc = "listFilesInternal: Unable to get listing of files for directory ["
+					+ baseDir
+					+ "] from Hadoop environment ["
+					+ getDataSource()
+					+ "].";
+			HadoopException hdpException = new HadoopException(msgDesc, ioe);
+			hdpException.generateResponseDataMap(false, getMessage(ioe),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+
+		} catch (IllegalArgumentException iae) {
+			String msgDesc = "Unable to get listing of files for directory ["
+					+ baseDir + "] from Hadoop environment [" + getDataSource()
+					+ "].";
+			HadoopException hdpException = new HadoopException(msgDesc, iae);
+			hdpException.generateResponseDataMap(false, getMessage(iae),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+		}
+		finally {
+			Thread.currentThread().setContextClassLoader(prevCl);
+		}
+		return fileList ;
+	}
+
+	
+	public List<String> listFiles(final String baseDir, final String fileMatching) {
+
+		PrivilegedAction<List<String>> action = new PrivilegedAction<List<String>>() {
+			@Override
+			public List<String> run() {
+				return listFilesInternal(baseDir, fileMatching) ;
+			}
+			
+		};
+		return Subject.doAs(getLoginSubject(),action) ;
+	}
+	
+	public static void main(String[] args) {
+		
+		if (args.length < 2) {
+			System.err.println("USAGE: java " + HadoopFS.class.getName() + " repositoryName baseDirectory [filenameToMatch]") ;
+			System.exit(1) ;
+		}
+		
+		String repositoryName = args[0] ;
+		String baseDir = args[1] ;
+		String fileNameToMatch = (args.length == 2 ? null : args[2]) ;
+		
+		HadoopFS fs = new HadoopFS(repositoryName) ;
+		List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
+		if (fsList != null && fsList.size() > 0) {
+			for(String s : fsList) {
+				System.out.println(s) ;
+			}
+		}
+		else {
+			System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + (fileNameToMatch == null ? "*" : fileNameToMatch) + "] in repository [" + repositoryName + "]") ;
+		}
+	}
+
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		boolean connectivityStatus = false;
+		HadoopFS connectionObj = new HadoopFS(dataSource, connectionProperties);
+		List<String> testResult = connectionObj.listFiles("/", null);
+		if (testResult != null && testResult.size() != 0) {
+			connectivityStatus = true;
+		}
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any files using given parameters. "
+					+ "You can still save the repository and start creating policies, "
+					+ "but you would not be able to use autocomplete for resource names. "
+					+ "Check xa_portal.log for more info.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg,
+					null, null, responseData);
+		}
+		return responseData;
+	}
+
+}
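
HadoopFS.testConnection() reports its outcome through a response map rather than a return-or-throw contract. A usage sketch (the repository name and property values are hypothetical; property keys are routed to config files via resourcenamemap.properties):

    import java.util.HashMap;

    HashMap<String, String> props = new HashMap<String, String>();
    props.put("username", "rangerlookup");                 // hypothetical lookup principal
    props.put("fs.default.name", "hdfs://namenode:8020");  // hypothetical NameNode address

    HashMap<String, Object> resp = HadoopFS.testConnection("dev-hdfs", props);
    System.out.println(resp.get("connectivityStatus") + " : " + resp.get("message"));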

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
new file mode 100644
index 0000000..8199971
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+public class HadoopFSTester {
+
+	public static void main(String[] args) throws Throwable {
+		if (args.length < 3) {
+			System.err.println("USAGE: java " + HadoopFSTester.class.getName() + " repositoryName propertyFile baseDirectory [filenameToMatch]") ;
+			System.exit(1) ;
+		}
+		
+		String repositoryName = args[0] ;
+		String propFile = args[1] ;
+		String baseDir = args[2] ;
+		String fileNameToMatch = (args.length == 3 ? null : args[3]) ;
+
+		Properties conf = new Properties() ;
+		InputStream in = HadoopFSTester.class.getClassLoader().getResourceAsStream(propFile) ;
+		if (in == null) {
+			System.err.println("Unable to find property file [" + propFile + "] in the classpath") ;
+			System.exit(1) ;
+		}
+		conf.load(in);
+		
+		HashMap<String,String> prop = new HashMap<String,String>() ;
+		for(Object key : conf.keySet()) {
+			Object val = conf.get(key) ;
+			prop.put((String)key, (String)val) ;
+		}
+		
+		HadoopFS fs = new HadoopFS(repositoryName, prop) ;
+		List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
+		if (fsList != null && fsList.size() > 0) {
+			for(String s : fsList) {
+				System.out.println(s) ;
+			}
+		}
+		else {
+			System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + (fileNameToMatch == null ? "*" : fileNameToMatch) + "] in repository [" + repositoryName + "]") ;
+		}
+
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
new file mode 100644
index 0000000..e7775d8
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
@@ -0,0 +1,163 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client.config;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.SecureClientLogin;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
+public abstract class BaseClient {
+	private static final Log LOG = LogFactory.getLog(BaseClient.class) ;
+	
+	private String dataSource ;
+	private Subject loginSubject ;
+	private HadoopConfigHolder configHolder;
+	
+	protected HashMap<String,String> connectionProperties ;
+	
+	public BaseClient(String dataSource) {
+		this.dataSource = dataSource ;
+		init() ;
+		login() ;
+	}
+	
+	public BaseClient(String dataSource, HashMap<String,String> connectionProperties) {
+		this.dataSource = dataSource ;
+		this.connectionProperties = connectionProperties ;
+		init() ;
+		login() ;
+	}
+	
+	
+	private void init() {
+		if (connectionProperties == null) {
+			configHolder = HadoopConfigHolder.getInstance(dataSource) ;
+		}
+		else {
+			configHolder = HadoopConfigHolder.getInstance(dataSource,connectionProperties) ;
+		}
+	}
+	
+	
+	protected void login() {
+		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		try {
+			Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
+			String userName = configHolder.getUserName() ;
+			if (userName == null) {
+				String msgDesc = "Unable to find login username for Hadoop environment ["
+						+ dataSource + "]";
+				HadoopException hdpException = new HadoopException(msgDesc);
+				hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
+						null, null);
+
+				throw hdpException;
+			}
+			String keyTabFile = configHolder.getKeyTabFile() ;
+			if (keyTabFile != null) {
+				if ( configHolder.isKerberosAuthentication() ) {
+					LOG.info("Init Login: security enabled, using username/keytab");
+					loginSubject = SecureClientLogin.loginUserFromKeytab(userName, keyTabFile) ;
+				}
+				else {
+					LOG.info("Init Login: using username");
+					loginSubject = SecureClientLogin.login(userName) ;
+				}
+			}
+			else {
+				String password = configHolder.getPassword() ;
+				if ( configHolder.isKerberosAuthentication() ) {
+					LOG.info("Init Login: using username/password");
+					loginSubject = SecureClientLogin.loginUserWithPassword(userName, password) ;
+				}
+				else {
+					LOG.info("Init Login: security not enabled, using username");
+					loginSubject = SecureClientLogin.login(userName) ;
+				}
+			}
+		} catch (IOException ioe) {
+			String msgDesc = "Unable to login to Hadoop environment ["
+					+ dataSource + "]";
+
+			HadoopException hdpException = new HadoopException(msgDesc, ioe);
+			hdpException.generateResponseDataMap(false, getMessage(ioe),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+		} catch (SecurityException se) {
+			String msgDesc = "Unable to login to Hadoop environment ["
+					+ dataSource + "]";
+			HadoopException hdpException = new HadoopException(msgDesc, se);
+			hdpException.generateResponseDataMap(false, getMessage(se),
+					msgDesc + errMsg, null, null);
+			throw hdpException;
+		} finally {
+			Thread.currentThread().setContextClassLoader(prevCl);
+		}
+	}
+	
+	public String getDataSource() {
+		return dataSource ;
+	}
+
+	protected Subject getLoginSubject() {
+		return loginSubject;
+	}
+
+	protected HadoopConfigHolder getConfigHolder() {
+		return configHolder;
+	}
+	
+	public static void generateResponseDataMap(boolean connectivityStatus,
+			String message, String description, Long objectId,
+			String fieldName, HashMap<String, Object> responseData) {
+		responseData.put("connectivityStatus", connectivityStatus);
+		responseData.put("message", message);
+		responseData.put("description", description);
+		responseData.put("objectId", objectId);
+		responseData.put("fieldName", fieldName);
+	}
+
+	public static String getMessage(Throwable excp) {
+		List<String> errList = new ArrayList<String>();
+		while (excp != null) {
+			String message = excp.getMessage();
+			if (message != null && !message.equalsIgnoreCase("") && !errList.contains(message + ". \n")) {
+				errList.add(message + ". \n");
+			}
+			excp = excp.getCause();
+		}
+		return StringUtils.join(errList, "");
+	}
+	
+}
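
BaseClient establishes the login Subject but leaves the privileged work to subclasses, which wrap their calls in a PrivilegedAction run under getLoginSubject() (exactly as HadoopFS.listFiles() does above). A minimal subclass sketch (EchoClient is hypothetical):

    import java.security.AccessController;
    import java.security.PrivilegedAction;
    import javax.security.auth.Subject;

    public class EchoClient extends BaseClient {
        public EchoClient(String dataSource) {
            super(dataSource);
        }

        public String whoAmI() {
            // runs with the credentials established by BaseClient.login()
            return Subject.doAs(getLoginSubject(), new PrivilegedAction<String>() {
                @Override
                public String run() {
                    return String.valueOf(Subject.getSubject(AccessController.getContext()));
                }
            });
        }
    }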

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
new file mode 100644
index 0000000..bb13538
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
+public class HadoopClassLoader extends ClassLoader {
+	
+	private static final Log LOG = LogFactory.getLog(HadoopClassLoader.class) ;
+	
+	private HadoopConfigHolder confHolder ;
+	
+	public HadoopClassLoader(HadoopConfigHolder confHolder) {
+		super(Thread.currentThread().getContextClassLoader()) ;
+		this.confHolder = confHolder;
+	}
+	
+	
+	@Override
+	protected URL findResource(String resourceName) {
+		LOG.debug("findResource(" + resourceName + ") is called.") ;
+		URL ret = null;
+	
+		if (confHolder.hasResourceExists(resourceName)) {
+			ret = buildResourceFile(resourceName) ;
+		}
+		else {
+			ret = super.findResource(resourceName);
+		}
+		LOG.debug("findResource(" + resourceName + ") is returning [" + ret + "]") ;
+		return ret ;
+	}
+	
+	
+	private URL buildResourceFile(String aResourceName) {
+		URL ret = null ;
+		String prefix = aResourceName ;
+		String suffix = ".txt" ;
+
+		Properties prop = confHolder.getProperties(aResourceName) ;
+		if (prop != null && prop.size() > 0) {
+			LOG.debug("Building XML for: " + prop.toString());
+			if (aResourceName.contains(".")) {
+				int lastDotFound = aResourceName.lastIndexOf(".") ;
+				prefix = aResourceName.substring(0,lastDotFound) + "-" ;
+				suffix = aResourceName.substring(lastDotFound) ;
+			}
+			
+			try {
+				File tempFile = File.createTempFile(prefix, suffix) ;
+				tempFile.deleteOnExit();
+				PrintWriter out = new PrintWriter(new FileWriter(tempFile)) ;
+				out.println("<?xml version=\"1.0\"?>") ;
+				out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>") ;
+				out.println("<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">") ;
+				for(Object keyobj : prop.keySet()) {
+					String key = (String)keyobj;
+					String val = prop.getProperty(key) ;
+					if (HadoopConfigHolder.HADOOP_RPC_PROTECTION.equals(key) && (val == null || val.trim().isEmpty()))  {
+						continue;
+					}
+					out.println("<property><name>" + key.trim() + "</name><value>" + val + "</value></property>") ;
+				}
+				out.println("</configuration>") ;
+				out.close() ;
+				ret = tempFile.toURI().toURL() ;
+			} catch (IOException e) {
+				throw new HadoopException("Unable to create Hadoop configuration file [" + aResourceName + "]", e) ;
+			}
+			
+		}
+		
+		return ret ;
+
+	}
+	
+
+}
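
The temp file produced by buildResourceFile() is served back through the standard ClassLoader.getResource() lookup, so Hadoop's Configuration sees the per-datasource properties as ordinary classpath resources. A usage sketch (datasource name is hypothetical; note that getResource() delegates to the parent loader first, so a real core-site.xml already on the classpath would win):

    HadoopConfigHolder holder = HadoopConfigHolder.getInstance("dev-hdfs");
    java.net.URL url = holder.getClassLoader().getResource("core-site.xml");
    System.out.println("core-site.xml resolved to: " + url);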

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
new file mode 100644
index 0000000..7651bb5
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client.config;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
+public class HadoopConfigHolder  {
+	private static final Log LOG = LogFactory.getLog(HadoopConfigHolder.class) ;
+	public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties" ;
+	public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties" ;
+	public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties" ;
+	public static final String DEFAULT_RESOURCE_NAME = "core-site.xml" ;
+	public static final String RANGER_SECTION_NAME = "xalogin.xml" ;
+	public static final String RANGER_LOGIN_USER_NAME_PROP = "username" ;
+	public static final String RANGER_LOGIN_KEYTAB_FILE_PROP = "keytabfile" ;
+	public static final String RANGER_LOGIN_PASSWORD = "password" ;
+	public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
+	public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
+	public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
+	
+
+	private static boolean initialized = false ;
+	private static HashMap<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>() ;
+	private static Properties globalLoginProp = new Properties() ;
+	private static HashMap<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>() ;
+	private static Properties resourcemapProperties = null ;
+	
+	
+	private String datasourceName ;
+	private String userName ;
+	private String keyTabFile ;
+	private String password ;
+	private boolean isKerberosAuth ;
+	
+	private HadoopClassLoader classLoader ;
+	private HashMap<String,String>  connectionProperties; 
+	
+	public static HadoopConfigHolder getInstance(String aDatasourceName) {
+		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+		if (ret == null) {
+			synchronized(HadoopConfigHolder.class) {
+				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+				if (ret == null) {
+					ret = new HadoopConfigHolder(aDatasourceName) ;
+					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+				}
+			}
+		}
+		return ret ;
+	}
+	
+	public static HadoopConfigHolder getInstance(String aDatasourceName, HashMap<String,String> connectionProperties) {
+		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+		if (ret == null) {
+			synchronized(HadoopConfigHolder.class) {
+				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+				if (ret == null) {
+					ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+				}
+			}
+		}
+		else {
+			if (connectionProperties != null && !connectionProperties.equals(ret.connectionProperties)) {
+				ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+				dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+			}
+		}
+		return ret ;
+	}
+	
+	
+
+	private HadoopConfigHolder(String aDatasourceName) {
+		datasourceName = aDatasourceName;
+		if ( ! initialized ) {
+			init() ;
+		}
+		initLoginInfo();
+		initClassLoader() ;
+	}
+	
+	private HadoopConfigHolder(String aDatasourceName, HashMap<String,String> connectionProperties) {
+		datasourceName = aDatasourceName;
+		this.connectionProperties = connectionProperties ;
+		initConnectionProp() ;
+		initLoginInfo();
+		initClassLoader() ;
+	}
+	
+	private void initConnectionProp() {
+		for(String key : connectionProperties.keySet()) {
+			
+			String resourceName = getResourceName(key) ;
+			
+			if (resourceName == null) {
+				resourceName = RANGER_SECTION_NAME ;
+			}
+			String val = connectionProperties.get(key) ;
+			addConfiguration(datasourceName, resourceName, key, val );
+		}
+	}
+	
+	private String getResourceName(String key) {
+		
+		if (resourcemapProperties == null) {
+			initResourceMap();
+		}
+		
+		if (resourcemapProperties != null) {
+			return resourcemapProperties.getProperty(key);
+		}
+		else {
+			return null;
+		}
+	}
+
+	public static void initResourceMap() {
+		if (resourcemapProperties == null) {
+			resourcemapProperties = new Properties() ;
+			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE) ;
+			if (in != null) {
+				try {
+					resourcemapProperties.load(in);
+				} catch (IOException e) {
+					throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
+				}
+				finally {
+					try {
+						in.close();
+					} catch (IOException ioe) {
+						// ignore exceptions raised while closing the stream
+					}
+				}
+			}
+			else {
+				throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path.");
+			}
+		}
+	}
+
+	
+	
+	private static synchronized void init() {
+
+		if (initialized) {
+			return ;
+		}
+
+		try {
+			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE) ;
+			if (in != null) {
+				Properties prop = new Properties() ;
+				try {
+					prop.load(in) ;
+				} catch (IOException e) {
+					throw new HadoopException("Unable to get configuration information for Hadoop environments", e);
+				}
+				finally {
+					try {
+						in.close();
+					} catch (IOException e) {
+						// ignore exceptions raised while closing the stream
+					} 
+				}
+	
+				if (prop.size() == 0) 
+					return ;
+				
+				for(Object keyobj : prop.keySet()) {
+					String key = (String)keyobj;
+					String val = prop.getProperty(key) ;
+					
+					int dotLocatedAt = key.indexOf(".") ;
+					
+					if (dotLocatedAt == -1) {
+						continue ;
+					}
+					
+					String dataSource = key.substring(0,dotLocatedAt) ;
+					
+					String propKey = key.substring(dotLocatedAt+1) ;
+					int resourceFoundAt =  propKey.indexOf(".") ;
+					if (resourceFoundAt > -1) {
+						String resourceName = propKey.substring(0, resourceFoundAt) + ".xml" ; 
+						propKey = propKey.substring(resourceFoundAt+1) ;
+						addConfiguration(dataSource, resourceName, propKey, val) ;
+					}
+					
+				}
+			}
+			
+			in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE) ;
+			if (in != null) {
+				Properties tempLoginProp = new Properties() ;
+				try {
+					tempLoginProp.load(in) ;
+				} catch (IOException e) {
+					throw new HadoopException("Unable to get login configuration information for Hadoop environments from file: [" + GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e);
+				}
+				finally {
+					try {
+						in.close();
+					} catch (IOException e) {
+						// ignore exceptions raised while closing the stream
+					} 
+				}
+				globalLoginProp = tempLoginProp ;
+			}
+		}
+		finally {
+			initialized = true ;
+		}
+	}
+	
+	
+	private void initLoginInfo() {
+		Properties prop = this.getRangerSection() ;
+		if (prop != null) {
+			userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP) ;
+			keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP) ;
+			password = prop.getProperty(RANGER_LOGIN_PASSWORD) ;
+		
+			if ( getHadoopSecurityAuthentication() != null) {
+				isKerberosAuth = ( getHadoopSecurityAuthentication().equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
+			}
+			else {
+				isKerberosAuth = (userName != null) && (userName.indexOf("@") > -1) ;
+			}
+					
+		}
+	}
+	
+	private void initClassLoader() {
+		classLoader = new HadoopClassLoader(this) ;
+	}
+	
+	
+	public Properties getRangerSection() {
+		Properties prop = this.getProperties(RANGER_SECTION_NAME) ;
+		if (prop == null) {
+			prop = globalLoginProp ;
+		}
+		return prop ;
+	}
+
+
+
+	private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) {
+
+		if (dataSource == null || dataSource.isEmpty()) {
+			return ;
+		}
+		
+		if (propertyName == null || propertyName.isEmpty()) {
+			return ;
+		}
+		
+		if (resourceName == null) {
+			resourceName = DEFAULT_RESOURCE_NAME ;
+		}
+		
+		
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(dataSource) ;
+		
+		if (resourceName2PropertiesMap == null) {
+			resourceName2PropertiesMap = new HashMap<String,Properties>() ;
+			dataSource2ResourceListMap.put(dataSource, resourceName2PropertiesMap) ;
+		}
+		
+		Properties prop = resourceName2PropertiesMap.get(resourceName) ;
+		if (prop == null) {
+			prop = new Properties() ;
+			resourceName2PropertiesMap.put(resourceName, prop) ;
+		}
+		if (value == null) {
+			prop.remove(propertyName) ;
+		}
+		else {
+			prop.put(propertyName, value) ;
+		}
+	}
+	
+	
+	public String getDatasourceName() {
+		return datasourceName ;
+	}
+	
+	public boolean hasResourceExists(String aResourceName) {
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
+		return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)) ;
+ 	}
+
+	public Properties getProperties(String aResourceName) {
+		Properties ret = null ;
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
+		if (resourceName2PropertiesMap != null) {
+			ret =  resourceName2PropertiesMap.get(aResourceName) ;
+		}
+		return ret ;
+ 	}
+	
+	public String getHadoopSecurityAuthentication() {
+		Properties repoParam = null ;
+		String ret = null;
+		
+		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(this.getDatasourceName()) ;
+		
+		if ( resourceName2PropertiesMap != null) {
+			repoParam=resourceName2PropertiesMap.get(DEFAULT_RESOURCE_NAME);
+		}
+		
+		if ( repoParam != null ) {
+			ret = (String)repoParam.get(HADOOP_SECURITY_AUTHENTICATION);
+		}
+		return ret;
+ 	}
+	
+	public String getUserName() {
+		return userName;
+	}
+
+	public String getKeyTabFile() {
+		return keyTabFile;
+	}
+
+	public String getPassword() {
+		return password;
+	}
+
+	public HadoopClassLoader getClassLoader() {
+		return classLoader;
+	}
+
+	public boolean isKerberosAuthentication() {
+		return isKerberosAuth;
+	}
+
+}
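
init() above decomposes each datasource.properties key as <datasource>.<resource>.<property>: the first dot ends the datasource name, the second ends the resource name (which gets an ".xml" suffix), and the remainder is the property key. An illustrative entry (all names invented):

    # datasource.properties
    dev-hdfs.core-site.fs.default.name=hdfs://namenode:8020
    # -> datasource "dev-hdfs", resource "core-site.xml",
    #    property "fs.default.name" = "hdfs://namenode:8020"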

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
new file mode 100644
index 0000000..5614343
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hadoop.client.exceptions;
+
+import java.util.HashMap;
+
+public class HadoopException extends RuntimeException {
+
+	private static final long serialVersionUID = 8872734935128535649L;
+	
+	public HashMap<String, Object> responseData;
+
+	public HadoopException() {
+		super();
+	}
+
+	public HadoopException(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+	public HadoopException(String message) {
+		super(message);
+	}
+
+	public HadoopException(Throwable cause) {
+		super(cause);
+	}
+
+	public void generateResponseDataMap(boolean connectivityStatus,
+			String message, String description, Long objectId, String fieldName) {
+		responseData = new HashMap<String, Object>();
+		responseData.put("connectivityStatus", connectivityStatus);
+		responseData.put("message", message);
+		responseData.put("description", description);
+		responseData.put("objectId", objectId);
+		responseData.put("fieldName", fieldName);
+	}
+
+}
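
Since HadoopException carries its UI payload in the public responseData map, callers typically log or display that map directly. A small consumption sketch (datasource name is hypothetical; responseData is only populated when generateResponseDataMap() was called before the throw):

    try {
        new HadoopFS("dev-hdfs").listFiles("/apps", null);
    } catch (HadoopException he) {
        if (he.responseData != null) {
            System.err.println(he.responseData.get("message"));
            System.err.println(he.responseData.get("description"));
        }
    }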

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
new file mode 100644
index 0000000..1df5a0b
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
@@ -0,0 +1,403 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hbase.client;
+
+import java.io.IOException;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map.Entry;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.MasterNotRunningException;
+import org.apache.hadoop.hbase.ZooKeeperConnectionException;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.ranger.hadoop.client.config.BaseClient;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
+import com.google.protobuf.ServiceException;
+
+public class HBaseClient extends BaseClient {
+
+	private static final Log LOG = LogFactory.getLog(HBaseClient.class) ;
+
+	private Subject subj = null ;
+
+	public HBaseClient(String dataSource) {
+		super(dataSource) ;		
+	}
+
+	public HBaseClient(String dataSource,HashMap<String,String> connectionProp) {
+		super(dataSource, addDefaultHBaseProp(connectionProp)) ;		
+	}
+	
+	//TODO: temporary solution - to be added to the UI for HBase 
+	private static HashMap<String,String> addDefaultHBaseProp(HashMap<String,String> connectionProp) {
+		if (connectionProp != null) {
+			String param = "zookeeper.znode.parent" ;
+			String unsecuredPath = "/hbase-unsecure" ;
+			String authParam = "hadoop.security.authorization" ;
+			
+			String ret = connectionProp.get(param) ;
+			LOG.info("HBase connection has [" + param + "] with value [" + ret + "]");
+			if (ret == null) {
+				ret = connectionProp.get(authParam) ;
+				LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]");
+				if (ret != null && ret.trim().equalsIgnoreCase("false")) {
+					LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]");
+					connectionProp.put(param, unsecuredPath) ;
+				}
+			}
+		}
+		return connectionProp;
+	}
+	
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		boolean connectivityStatus = false;
+
+		HBaseClient connectionObj = new HBaseClient(dataSource,
+				connectionProperties);
+		connectivityStatus = connectionObj.getHBaseStatus();
+
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any databases using given parameters.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg
+					+ errMsg, null, null, responseData);
+		}
+		return responseData;
+	}
+	
+	public boolean getHBaseStatus() {
+		boolean hbaseStatus = false;
+		subj = getLoginSubject();
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (subj != null) {
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+			try {
+				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+	
+				hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() {
+					@Override
+					public Boolean run() {
+						Boolean hbaseStatus1 = false;
+						try {
+						    LOG.info("getHBaseStatus: creating default HBase configuration");
+							Configuration conf = HBaseConfiguration.create() ;
+							LOG.info("getHBaseStatus: setting config values from client");
+							setClientConfigValues(conf);
+						    LOG.info("getHBaseStatus: checking HBase availability with the new config");
+							HBaseAdmin.checkHBaseAvailable(conf);
+						    LOG.info("getHBaseStatus: no exception: HBase is available");
+							hbaseStatus1 = true;
+						} catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getHBaseStatus: Looks like `Master` is not running, "
+									+ "so the availability of HBase could not be verified. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						} catch (ServiceException se) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of "
+									+ "HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, se);
+							hdpException.generateResponseDataMap(false, getMessage(se),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch(IOException io) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of"
+									+ " HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						}  catch (Throwable e) {
+							String msgDesc = "getHBaseStatus: Unable to check availability of"
+									+ " HBase environment [" + getConfigHolder().getDatasourceName() + "].";
+							LOG.error(msgDesc, e);
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+						}
+						return hbaseStatus1;
+					}
+				}) ;
+			} catch (SecurityException se) {
+				String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, "
+						+ "current thread might not be able to set the context ClassLoader.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
+			}
+		} else {
+			LOG.error("getHBaseStatus: secure login not done, subject is null");
+		}
+		
+		return hbaseStatus;
+	}
+	
+	private void setClientConfigValues(Configuration conf) {
+		if (this.connectionProperties == null) return;
+		Iterator<Entry<String, String>> i =  this.connectionProperties.entrySet().iterator();
+		while (i.hasNext()) {
+			Entry<String, String> e = i.next();
+			String v = conf.get(e.getKey());
+			// apply the client-supplied value when the key is absent or differs
+			if (v == null || !v.equalsIgnoreCase(e.getValue())) {
+				conf.set(e.getKey(), e.getValue());
+			}
+		}		
+	}
+
+	public List<String> getTableList(final String tableNameMatching) {
+		List<String> ret = null ;
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		
+		subj = getLoginSubject();
+		
+		if (subj != null) {
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+			try {
+				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+	
+				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
+		
+					@Override
+					public List<String> run() {
+						
+						List<String> tableList = new ArrayList<String>() ;
+						HBaseAdmin admin = null ;
+						try {
+							
+							Configuration conf = HBaseConfiguration.create() ;
+							admin = new HBaseAdmin(conf) ;
+							for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
+								tableList.add(htd.getNameAsString()) ;
+							}
+						} catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getTableList: Looks like `Master` is not running, "
+									+ "so the availability of HBase could not be verified. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						}  catch(IOException io) {
+							String msgDesc = "Unable to get HBase table list for [repository:"
+									+ getConfigHolder().getDatasourceName() + ",table-match:" 
+									+ tableNameMatching + "].";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+						}   catch (Throwable e) {
+							String msgDesc = "Unable to get HBase table list for [repository:"
+									+ getConfigHolder().getDatasourceName() + ",table-match:" 
+									+ tableNameMatching + "].";
+							LOG.error(msgDesc);
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+						}
+						finally {
+							if (admin != null) {
+								try {
+									admin.close() ;
+								} catch (IOException e) {
+									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
+								}
+							}
+						}
+						return tableList ;
+					}
+					
+				}) ;
+			}
+			finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
+			}
+		}
+		return ret ;
+	}
+	
+	
+	public List<String> getColumnFamilyList(final String tableName, final String columnFamilyMatching) {
+		List<String> ret = null ;
+		final String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		
+		subj = getLoginSubject();
+		if (subj != null) {
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+			try {
+				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+				
+				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
+		
+					@Override
+					public List<String> run() {
+						
+						List<String> colfList = new ArrayList<String>() ;
+						HBaseAdmin admin = null ;
+						try {
+							Configuration conf = HBaseConfiguration.create();
+							admin = new HBaseAdmin(conf) ;
+							HTableDescriptor htd = admin.getTableDescriptor(tableName.getBytes()) ;
+							if (htd != null) {
+								for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
+									String colf = hcd.getNameAsString() ;
+									if (colf.matches(columnFamilyMatching)) {
+										if (!colfList.contains(colf)) {
+											colfList.add(colf) ;
+										}
+									}
+								}
+							}
+						}  catch (ZooKeeperConnectionException zce) {
+							String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` "
+									+ "using given config parameters.";
+							HadoopException hdpException = new HadoopException(msgDesc, zce);
+							hdpException.generateResponseDataMap(false, getMessage(zce),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+							
+						} catch (MasterNotRunningException mnre) {
+						} catch (MasterNotRunningException mnre) {
+							String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, "
+									+ "so the availability of HBase could not be verified. "
+									+ "Please try again later.";
+							HadoopException hdpException = new HadoopException(
+									msgDesc, mnre);
+							hdpException.generateResponseDataMap(false,
+									getMessage(mnre), msgDesc + errMsg,
+									null, null);
+							throw hdpException;
+
+						}  catch(IOException io) {
+							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+									+ "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tableName
+									+ ", column-family-match:" + columnFamilyMatching + "], "
+									+ "current thread might not be able to set the context ClassLoader.";
+							HadoopException hdpException = new HadoopException(msgDesc, io);
+							hdpException.generateResponseDataMap(false, getMessage(io),
+									msgDesc + errMsg, null, null);
+							throw hdpException; 
+						} catch (SecurityException se) {
+								String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+										+ "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tableName
+										+ ", column-family-match:" + columnFamilyMatching + "], "
+										+ "current thread might not be able to set the context ClassLoader.";
+								HadoopException hdpException = new HadoopException(msgDesc, se);
+								hdpException.generateResponseDataMap(false, getMessage(se),
+										msgDesc + errMsg, null, null);
+								throw hdpException;							
+							
+						}  catch (Throwable e) {
+							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
+									+ "[repository:" + getConfigHolder().getDatasourceName() + ",table:" + tableName
+									+ ", column-family-match:" + columnFamilyMatching + "], "
+									+ "current thread might not be able to set the context ClassLoader.";
+							LOG.error(msgDesc);
+							HadoopException hdpException = new HadoopException(msgDesc, e);
+							hdpException.generateResponseDataMap(false, getMessage(e),
+									msgDesc + errMsg, null, null);
+							throw hdpException;
+						}
+						finally {
+							if (admin != null) {
+								try {
+									admin.close() ;
+								} catch (IOException e) {
+									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
+								}
+							}
+						}
+						return colfList ;
+					}
+					
+				}) ;
+			} catch (SecurityException se) {
+				String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, "
+						+ "current thread might not be able set the context ClassLoader.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
+			}
+		}
+		return ret ;
+	}
+}
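
A short driver for the two lookup methods above (repository name and filters are hypothetical; HBaseClientTester below does the same from a properties file). Note that admin.listTables() treats the argument as a regular expression, and both methods return null when no login Subject is available:

    HBaseClient hc = new HBaseClient("dev-hbase");
    java.util.List<String> tables = hc.getTableList(".*");   // regex filter
    if (tables != null) {
        for (String table : tables) {
            for (String cf : hc.getColumnFamilyList(table, ".*")) {
                System.out.println(table + " : " + cf);
            }
        }
    }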

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
new file mode 100644
index 0000000..dc177e6
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hbase.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HBaseClientTester {
+
+	private static final Log LOG = LogFactory.getLog(HBaseClientTester.class) ;
+
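+	// Command-line smoke test: with 3 args it lists tables matching a filter,
+	// with 4 args it lists column families for a table.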
+	public static void main(String[] args) throws Throwable {
+
+		HBaseClient hc = null;
+
+		if (args.length <= 2) {
+			System.err.println("USAGE: java " + HBaseClientTester.class.getName() + " dataSourceName propertyFile tableName [columnFamilyName]");
+			System.exit(1);
+		}
+		
+		LOG.info("Starting ...");
+
+		Properties conf = new Properties();
+		
+		conf.load(HBaseClientTester.class.getClassLoader().getResourceAsStream(args[1]));
+
+		HashMap<String, String> prop = new HashMap<String, String>();
+		for (Object key : conf.keySet()) {
+			Object val = conf.get(key);
+			prop.put((String) key, (String) val);
+		}
+
+		hc = new HBaseClient(args[0], prop);
+
+		if (args.length == 3) {
+			List<String> tableList = hc.getTableList(args[2]);
+			if (tableList.size() == 0) {
+				System.out.println("No tables found with table filter [" + args[2] + "]");
+			} else {
+				for (String str : tableList) {
+					System.out.println("table: " + str);
+				}
+			}
+		} else if (args.length == 4) {
+			List<String> columnFamilyList = hc.getColumnFamilyList(args[2], args[3]);
+			if (columnFamilyList.size() == 0) {
+				System.out.println("No column families found under table [" + args[2] + "] with columnfamily filter [" + args[3] + "]");
+			} else {
+				for (String str : columnFamilyList) {
+					System.out.println("ColumnFamily: " + str);
+				}
+			}
+		}
+
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
new file mode 100644
index 0000000..f8f50f8
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
@@ -0,0 +1,510 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hive.client;
+
+import java.io.Closeable;
+import java.security.PrivilegedAction;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLTimeoutException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.ranger.hadoop.client.config.BaseClient;
+import org.apache.ranger.hadoop.client.exceptions.HadoopException;
+
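+/**
+ * JDBC-based lookup client for Hive, used to populate resource-name
+ * autocomplete (databases, tables, columns) for a repository. All Hive
+ * calls run inside the login Subject so Kerberos-secured clusters work
+ * with the pre-authenticated credentials.
+ */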
+public class HiveClient extends BaseClient implements Closeable {
+
+	private static final Log LOG = LogFactory.getLog(HiveClient.class) ;
+	
+	Connection con = null ;
+	boolean isKerberosAuth=false;
+	
+
+	public HiveClient(String dataSource) {
+		super(dataSource) ;
+		initHive() ;
+	}
+	
+	public HiveClient(String dataSource,HashMap<String,String> connectionProp) {
+		super(dataSource,connectionProp) ;
+		initHive() ;
+	}
+	
+	public void initHive() {
+		isKerberosAuth = getConfigHolder().isKerberosAuthentication();
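+		// Kerberos repositories connect inside the pre-authenticated login
+		// Subject; others fall back to username/password from the config.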
+		if (isKerberosAuth) {
+			LOG.info("Secured mode: creating the JDBC connection as the pre-authenticated Subject");
+			Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
+				public Object run() {
+					initConnection();
+					return null;
+				}
+			}) ;				
+		}
+		else {
+			LOG.info("Kerberos authentication is not enabled; connecting with the configured username and password");
+			final String userName = getConfigHolder().getUserName() ;
+			final String password = getConfigHolder().getPassword() ;
+			Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
+				public Object run() {
+					initConnection(userName,password);
+					return null;
+				}
+			}) ;	
+		}
+	}
+	
+	public List<String> getDatabaseList(String databaseMatching){
+	 	final String dbMatching=databaseMatching;
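+		// Run the lookup under the login Subject so it also works against
+		// Kerberos-secured Hive Thrift Server instances.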
+		List<String> dblist = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
+			public List<String>  run() {
+				return getDBList(dbMatching);
+			}
+		}) ;
+		return dblist;
+	}
+		
+	private List<String> getDBList(String databaseMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (con != null) {
+			Statement stat =  null ;
+			ResultSet rs = null ;
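+			// Hive accepts SHOW DATABASES LIKE "<pattern>"; the pattern is
+			// concatenated into the SQL text, so it must be trusted input.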
+			String sql = "show databases" ;
+			if (databaseMatching != null && ! databaseMatching.isEmpty()) {
+				sql = sql + " like \"" + databaseMatching  + "\"" ;
+			}
+			try {
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					ret.add(rs.getString(1)) ;
+				}
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out while executing SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+	
+	public List<String> getTableList(String database, String tableNameMatching){
+		final String db=database;
+		final String tblNameMatching=tableNameMatching;
+		List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
+			public List<String>  run() {
+				return getTblList(db,tblNameMatching);
+			}
+		}) ;
+		return tableList;
+	}
+
+	public List<String> getTblList(String database, String tableNameMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (con != null) {
+			Statement stat =  null ;
+			ResultSet rs = null ;
+			
+			String sql = null ;
+			
+			try {
+				sql = "use " + database;
+				
+				try {
+					stat = con.createStatement() ;
+					stat.execute(sql) ;
+				}
+				finally {
+					close(stat) ;
+				}
+				
+				sql = "show tables " ;
+				if (tableNameMatching != null && ! tableNameMatching.isEmpty()) {
+					sql = sql + " like \"" + tableNameMatching  + "\"" ;
+				}
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					ret.add(rs.getString(1)) ;
+				}
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out while executing SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+
+	public List<String> getViewList(String database, String viewNameMatching) {
+		// View lookup is not implemented yet; callers should expect null.
+		List<String> ret = null ;
+		return ret ;
+	}
+
+	public List<String> getUDFList(String database, String udfMatching) {
+		// UDF lookup is not implemented yet; callers should expect null.
+		List<String> ret = null ;
+		return ret ;
+	}
+	
+	public List<String> getColumnList(String database, String tableName, String columnNameMatching) {
+		final String db=database;
+		final String tblName=tableName;
+		final String clmNameMatching=columnNameMatching;
+		List<String> columnList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
+			public List<String>  run() {
+					return getClmList(db,tblName,clmNameMatching);
+				}
+			}) ;
+		return columnList;
+	}
+	
+	public List<String> getClmList(String database, String tableName, String columnNameMatching) {
+		List<String> ret = new ArrayList<String>() ;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+		if (con != null) {
+			
+			String columnNameMatchingRegEx = null ;
+			
+			if (columnNameMatching != null && ! columnNameMatching.isEmpty()) {
+				columnNameMatchingRegEx = columnNameMatching ;
+			}
+			
+			Statement stat =  null ;
+			ResultSet rs = null ;
+			
+			String sql = null ;
+			
+			try {
+				sql = "use " + database;
+				
+				try {
+					stat = con.createStatement() ;
+					stat.execute(sql) ;
+				}
+				finally {
+					close(stat) ;
+				}
+				
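+				// "describe <table>" has no LIKE filter, so columns are matched
+				// client-side with a wildcard match on each returned name.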
+				sql = "describe " + tableName ;
+				stat =  con.createStatement()  ;
+				rs = stat.executeQuery(sql) ;
+				while (rs.next()) {
+					String columnName = rs.getString(1) ;
+					if (columnNameMatchingRegEx == null) {
+						ret.add(columnName) ;
+					}
+					else if (FilenameUtils.wildcardMatch(columnName,columnNameMatchingRegEx)) {
+						ret.add(columnName) ;
+					}
+				}
+			} catch (SQLTimeoutException sqlt) {
+				String msgDesc = "Timed out while executing SQL [" + sql
+						+ "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqlt);
+				hdpException.generateResponseDataMap(false, getMessage(sqlt),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SQLException sqle) {
+				String msgDesc = "Unable to execute SQL [" + sql + "].";
+				HadoopException hdpException = new HadoopException(msgDesc,
+						sqle);
+				hdpException.generateResponseDataMap(false, getMessage(sqle),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} finally {
+				close(rs) ;
+				close(stat) ;
+			}
+			
+		}
+		return ret ;
+	}
+	
+	
+	public void close() {
+		Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>(){
+			public Void run() {
+				close(con) ;
+				return null;
+			}
+		});
+	}
+	
+	private void close(Statement aStat) {
+		try {
+			if (aStat != null) {
+				aStat.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close SQL statement", e);
+		}
+	}
+
+	private void close(ResultSet aResultSet) {
+		try {
+			if (aResultSet != null) {
+				aResultSet.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close ResultSet", e);
+		}
+	}
+
+	private void close(Connection aCon) {
+		try {
+			if (aCon != null) {
+				aCon.close();
+			}
+		} catch (SQLException e) {
+			LOG.error("Unable to close SQL Connection", e);
+		}
+	}
+
+	private void initConnection() {
+		initConnection(null,null) ;
+	}
+
+	
+	private void initConnection(String userName, String password) {
+	
+		Properties prop = getConfigHolder().getRangerSection() ;
+		String driverClassName = prop.getProperty("jdbc.driverClassName") ;
+		String url =  prop.getProperty("jdbc.url") ;	
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+	
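+		// Register the configured JDBC driver explicitly; jdbc.driverClassName
+		// and jdbc.url come from the repository's configuration properties.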
+		if (driverClassName != null) {
+			try {
+				Driver driver = (Driver)Class.forName(driverClassName).newInstance() ;
+				DriverManager.registerDriver(driver);
+			} catch (SQLException e) {
+				String msgDesc = "initConnection: Caught SQLException while registering the "
+						+ "Hive driver, so unable to connect to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, e);
+				hdpException.generateResponseDataMap(false, getMessage(e),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (IllegalAccessException ilae) {
+				String msgDesc = "initConnection: Class or its nullary constructor might not be accessible. "
+						+ "Unable to initiate a connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, ilae);
+				hdpException.generateResponseDataMap(false, getMessage(ilae),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (InstantiationException ie) {
+				String msgDesc = "initConnection: Class may not have a nullary constructor, or "
+						+ "instantiation failed for some other reason. "
+						+ "Unable to initiate a connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, ie);
+				hdpException.generateResponseDataMap(false, getMessage(ie),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+				
+			} catch (ExceptionInInitializerError eie) {
+				String msgDesc = "initConnection: Got ExceptionInInitializerError; "
+						+ "the initialization provoked by this method failed. "
+						+ "Unable to initiate a connection to the Hive Thrift Server instance.";
+				HadoopException hdpException = new HadoopException(msgDesc, eie);
+				hdpException.generateResponseDataMap(false, getMessage(eie),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (SecurityException se) {
+				String msgDesc = "initConnection: Unable to initiate a connection to the Hive Thrift Server instance; "
+						+ "the caller's class loader is not the same as, or an ancestor of, "
+						+ "the class loader for the current class, and invocation of "
+						+ "s.checkPackageAccess() denies access to the package of this class.";
+				HadoopException hdpException = new HadoopException(msgDesc, se);
+				hdpException.generateResponseDataMap(false, getMessage(se),
+						msgDesc + errMsg, null, null);
+				throw hdpException;
+			} catch (Throwable t) {
+				String msgDesc = "initConnection: Unable to connect to the Hive Thrift Server instance; "
+						+ "please provide a valid value for the field {jdbc.driverClassName}.";
+				HadoopException hdpException = new HadoopException(msgDesc, t);
+				hdpException.generateResponseDataMap(false, getMessage(t),
+						msgDesc + errMsg, null, "jdbc.driverClassName");
+				throw hdpException;
+			}
+		}
+		
+		try {
+			
+			if (userName == null && password == null) {
+				con = DriverManager.getConnection(url) ;
+			}
+			else {			
+				con = DriverManager.getConnection(url, userName, password) ;
+			}
+		
+		} catch (SQLException e) {
+			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+			HadoopException hdpException = new HadoopException(msgDesc, e);
+			hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
+					+ errMsg, null, null);
+			throw hdpException;
+		} catch (SecurityException se) {
+			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
+			HadoopException hdpException = new HadoopException(msgDesc, se);
+			hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
+					+ errMsg, null, null);
+			throw hdpException;
+		}
+	}
+
+	
+	public static void main(String[] args) {
+		
+		HiveClient hc = null ;
+		
+		if (args.length == 0) {
+			System.err.println("USAGE: java " + HiveClient.class.getName() + " dataSourceName <databaseName> <tableName> <columnName>") ;
+			System.exit(1) ;
+		}
+		
+		try {
+			hc = new HiveClient(args[0]) ;
+			
+			if (args.length == 2) {
+				List<String> dbList = hc.getDatabaseList(args[1]) ;
+				if (dbList.size() == 0) {
+					System.out.println("No database found with db filter [" + args[1] + "]") ;
+				}
+				else {
+					for (String str : dbList ) {
+						System.out.println("database: " + str ) ;
+					}
+				}
+			}
+			else if (args.length == 3) {
+				List<String> tableList = hc.getTableList(args[1], args[2]) ;
+				if (tableList.size() == 0) {
+					System.out.println("No tables found under database[" + args[1] + "] with table filter [" + args[2] + "]") ;
+				}
+				else {
+					for(String str : tableList) {
+						System.out.println("Table: " + str) ;
+					}
+				}
+			}
+			else if (args.length == 4) {
+				List<String> columnList = hc.getColumnList(args[1], args[2], args[3]) ;
+				if (columnList.size() == 0) {
+					System.out.println("No columns found for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + args[3] + "]") ;
+				}
+				else {
+					for (String str : columnList ) {
+						System.out.println("Column: " + str) ;
+					}
+				}
+			}
+			
+		}
+		finally {
+			if (hc != null) {
+				hc.close();
+			}
+		}	
+	}
+
+	public static HashMap<String, Object> testConnection(String dataSource,
+			HashMap<String, String> connectionProperties) {
+
+		HashMap<String, Object> responseData = new HashMap<String, Object>();
+		boolean connectivityStatus = false;
+		String errMsg = " You can still save the repository and start creating "
+				+ "policies, but you would not be able to use autocomplete for "
+				+ "resource names. Check xa_portal.log for more info.";
+
+		HiveClient connectionObj = new HiveClient(dataSource,
+				connectionProperties);
+		try {
+			// A repository is considered reachable if at least one database is visible.
+			List<String> testResult = connectionObj.getDatabaseList("*");
+			if (testResult != null && testResult.size() != 0) {
+				connectivityStatus = true;
+			}
+		} finally {
+			// Close the Hive connection even if the lookup above throws.
+			connectionObj.close();
+		}
+
+		if (connectivityStatus) {
+			String successMsg = "TestConnection Successful";
+			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+					null, null, responseData);
+		} else {
+			String failureMsg = "Unable to retrieve any databases using given parameters.";
+			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+					null, null, responseData);
+		}
+		return responseData;
+	}
+	
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
new file mode 100644
index 0000000..0128622
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.hive.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+public class HiveClientTester  {
+
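+	// Command-line smoke test: with 3 args it lists databases, with 4 args
+	// tables, and with 5 args columns, using the given repository and
+	// property file.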
+	public static void main(String[] args) throws Throwable {
+		
+		HiveClient hc = null ;
+		
+		if (args.length <= 2) {
+			System.err.println("USAGE: java " + HiveClientTester.class.getName() + " dataSourceName propertyFile databaseName [tableName] [columnName]") ;
+			System.exit(1) ;
+		}
+		
+		
+		try {
+			
+			Properties conf = new Properties() ;
+			conf.load(HiveClientTester.class.getClassLoader().getResourceAsStream(args[1]));
+			
+			HashMap<String,String> prop = new HashMap<String,String>() ;
+			for(Object key : conf.keySet()) {
+				Object val = conf.get(key) ;
+				prop.put((String)key, (String)val) ;
+			}
+
+			
+			hc = new HiveClient(args[0], prop) ;
+			
+			
+			if (args.length == 3) {
+				List<String> dbList = hc.getDatabaseList(args[2]) ;
+				if (dbList.size() == 0) {
+					System.out.println("No database found with db filter [" + args[2] + "]") ;
+				}
+				else {
+					for (String str : dbList ) {
+						System.out.println("database: " + str ) ;
+					}
+				}
+			}
+			else if (args.length == 4) {
+				List<String> tableList = hc.getTableList(args[2], args[3]) ;
+				if (tableList.size() == 0) {
+					System.out.println("No tables found under database[" + args[2] + "] with table filter [" + args[3] + "]") ;
+				}
+				else {
+					for(String str : tableList) {
+						System.out.println("Table: " + str) ;
+					}
+				}
+			}
+			else if (args.length == 5) {
+				List<String> columnList = hc.getColumnList(args[2], args[3], args[4]) ;
+				if (columnList.size() == 0) {
+					System.out.println("No columns found for db:" + args[2] + ", table: [" + args[3] + "], with column filter [" + args[4] + "]") ;
+				}
+				else {
+					for (String str : columnList ) {
+						System.out.println("Column: " + str) ;
+					}
+				}
+			}
+			
+		}
+		finally {
+			if (hc != null) {
+				hc.close();
+			}
+		}
+		
+	}
+	
+
+}