Posted to commits@ranger.apache.org by ma...@apache.org on 2014/12/12 02:30:13 UTC

[30/51] [partial] incubator-ranger git commit: RANGER-194: Rename packages from xasecure to apache ranger

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
deleted file mode 100644
index b1bd62b..0000000
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/config/HadoopConfigHolder.java
+++ /dev/null
@@ -1,361 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hadoop.client.config;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.xasecure.hadoop.client.exceptions.HadoopException;
-
-public class HadoopConfigHolder  {
-	private static final Log LOG = LogFactory.getLog(HadoopConfigHolder.class) ;
-	public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties" ;
-	public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties" ;
-	public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties" ;
-	public static final String DEFAULT_RESOURCE_NAME = "core-site.xml" ;
-	public static final String XASECURE_SECTION_NAME = "xalogin.xml" ;
-	public static final String XASECURE_LOGIN_USER_NAME_PROP = "username" ;
-	public static final String XASECURE_LOGIN_KEYTAB_FILE_PROP = "keytabfile" ;
-	public static final String XASECURE_LOGIN_PASSWORD = "password" ;
-	public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
-	public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
-	public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
-	
-
-	private static boolean initialized = false ;
-	private static HashMap<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>() ;
-	private static Properties globalLoginProp = new Properties() ;
-	private static HashMap<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>() ;
-	private static Properties resourcemapProperties = null ;
-	
-	
-	private String datasourceName ;
-	private String userName ;
-	private String keyTabFile ;
-	private String password ;
-	private boolean isKerberosAuth ;
-	
-	private HadoopClassLoader classLoader ;
-	private HashMap<String,String>  connectionProperties; 
-	
-	public static HadoopConfigHolder getInstance(String aDatasourceName) {
-		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-		if (ret == null) {
-			synchronized(HadoopConfigHolder.class) {
-				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-				if (ret == null) {
-					ret = new HadoopConfigHolder(aDatasourceName) ;
-					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-				}
-			}
-		}
-		return ret ;
-	}
-	
-	public static HadoopConfigHolder getInstance(String aDatasourceName, HashMap<String,String> connectionProperties) {
-		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-		if (ret == null) {
-			synchronized(HadoopConfigHolder.class) {
-				ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-				if (ret == null) {
-					ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
-					dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-				}
-			}
-		}
-		else {
-			if (connectionProperties != null  &&  !connectionProperties.equals(ret.connectionProperties)) {
-				ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
-				dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-			}
-		}
-		return ret ;
-	}
-	
-	
-
-	private HadoopConfigHolder(String aDatasourceName) {
-		datasourceName = aDatasourceName;
-		if ( ! initialized ) {
-			init() ;
-		}
-		initLoginInfo();
-		initClassLoader() ;
-	}
-	
-	private HadoopConfigHolder(String aDatasourceName, HashMap<String,String> connectionProperties) {
-		datasourceName = aDatasourceName;
-		this.connectionProperties = connectionProperties ;
-		initConnectionProp() ;
-		initLoginInfo();
-		initClassLoader() ;
-	}
-	
-	private void initConnectionProp() {
-		for(String key : connectionProperties.keySet()) {
-			
-			String resourceName = getResourceName(key) ;
-			
-			if (resourceName == null) {
-				resourceName = XASECURE_SECTION_NAME ;
-			}
-			String val = connectionProperties.get(key) ;
-			addConfiguration(datasourceName, resourceName, key, val );
-		}
-	}
-	
-	private String getResourceName(String key) {
-		
-		if (resourcemapProperties == null) {
-			initResourceMap();
-		}
-		
-		if (resourcemapProperties != null) {
-			return resourcemapProperties.getProperty(key);
-		}
-		else {
-			return null;
-		}
-	}
-
-	public static synchronized void initResourceMap() {
-		if (resourcemapProperties == null) {
-			resourcemapProperties = new Properties() ;
-			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE) ;
-			if (in != null) {
-				try {
-					resourcemapProperties.load(in);
-				} catch (IOException e) {
-					throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
-				}
-				finally {
-					try {
-						in.close();
-					} catch (IOException e) {
-						// Ignored exception when the stream is closed.
-					}
-				}
-			}
-			else {
-				throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path.");
-			}
-		}
-	}
-
-	
-	
-	private static synchronized void init() {
-
-		if (initialized) {
-			return ;
-		}
-
-		try {
-			InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE) ;
-			if (in != null) {
-				Properties prop = new Properties() ;
-				try {
-					prop.load(in) ;
-				} catch (IOException e) {
-					throw new HadoopException("Unable to get configuration information for Hadoop environments", e);
-				}
-				finally {
-					try {
-						in.close();
-					} catch (IOException e) {
-						// Ignored exception when the stream is closed.
-					} 
-				}
-	
-				if (prop.size() == 0) 
-					return ;
-				
-				for(Object keyobj : prop.keySet()) {
-					String key = (String)keyobj;
-					String val = prop.getProperty(key) ;
-					
-					int dotLocatedAt = key.indexOf(".") ;
-					
-					if (dotLocatedAt == -1) {
-						continue ;
-					}
-					
-					String dataSource = key.substring(0,dotLocatedAt) ;
-					
-					String propKey = key.substring(dotLocatedAt+1) ;
-					int resourceFoundAt =  propKey.indexOf(".") ;
-					if (resourceFoundAt > -1) {
-						String resourceName = propKey.substring(0, resourceFoundAt) + ".xml" ; 
-						propKey = propKey.substring(resourceFoundAt+1) ;
-						addConfiguration(dataSource, resourceName, propKey, val) ;
-					}
-					
-				}
-			}
-			
-			in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE) ;
-			if (in != null) {
-				Properties tempLoginProp = new Properties() ;
-				try {
-					tempLoginProp.load(in) ;
-				} catch (IOException e) {
-					throw new HadoopException("Unable to get login configuration information for Hadoop environments from file: [" + GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e);
-				}
-				finally {
-					try {
-						in.close();
-					} catch (IOException e) {
-						// Ignored exception when the stream is closed.
-					} 
-				}
-				globalLoginProp = tempLoginProp ;
-			}
-		}
-		finally {
-			initialized = true ;
-		}
-	}
-	
-	
-	private void initLoginInfo() {
-		Properties prop = this.getXASecureSection() ;
-		if (prop != null) {
-			userName = prop.getProperty(XASECURE_LOGIN_USER_NAME_PROP) ;
-			keyTabFile = prop.getProperty(XASECURE_LOGIN_KEYTAB_FILE_PROP) ;
-			password = prop.getProperty(XASECURE_LOGIN_PASSWORD) ;
-		
-			if ( getHadoopSecurityAuthentication() != null) {
-				isKerberosAuth = ( getHadoopSecurityAuthentication().equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
-			}
-			else {
-				isKerberosAuth = (userName != null) && (userName.indexOf("@") > -1) ;
-			}
-					
-		}
-	}
-	
-	private void initClassLoader() {
-		classLoader = new HadoopClassLoader(this) ;
-	}
-	
-	
-	public Properties getXASecureSection() {
-		Properties prop = this.getProperties(XASECURE_SECTION_NAME) ;
-		if (prop == null) {
-			prop = globalLoginProp ;
-		}
-		return prop ;
-	}
-
-
-
-	private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) {
-
-		if (dataSource == null || dataSource.isEmpty()) {
-			return ;
-		}
-		
-		if (propertyName == null || propertyName.isEmpty()) {
-			return ;
-		}
-		
-		if (resourceName == null) {
-			resourceName = DEFAULT_RESOURCE_NAME ;
-		}
-		
-		
-		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(dataSource) ;
-		
-		if (resourceName2PropertiesMap == null) {
-			resourceName2PropertiesMap = new HashMap<String,Properties>() ;
-			dataSource2ResourceListMap.put(dataSource, resourceName2PropertiesMap) ;
-		}
-		
-		Properties prop = resourceName2PropertiesMap.get(resourceName) ;
-		if (prop == null) {
-			prop = new Properties() ;
-			resourceName2PropertiesMap.put(resourceName, prop) ;
-		}
-		if (value == null) {
-			prop.remove(propertyName) ;
-		}
-		else {
-			prop.put(propertyName, value) ;
-		}
-	}
-	
-	
-	public String getDatasourceName() {
-		return datasourceName ;
-	}
-	
-	public boolean hasResourceExists(String aResourceName) {
-		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
-		return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)) ;
- 	}
-
-	public Properties getProperties(String aResourceName) {
-		Properties ret = null ;
-		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
-		if (resourceName2PropertiesMap != null) {
-			ret =  resourceName2PropertiesMap.get(aResourceName) ;
-		}
-		return ret ;
- 	}
-	
-	public String getHadoopSecurityAuthentication() {
-		Properties repoParam = null ;
-		String ret = null;
-		
-		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(this.getDatasourceName()) ;
-		
-		if ( resourceName2PropertiesMap != null) {
-			repoParam=resourceName2PropertiesMap.get(DEFAULT_RESOURCE_NAME);
-		}
-		
-		if ( repoParam != null ) {
-			ret = (String)repoParam.get(HADOOP_SECURITY_AUTHENTICATION);
-		}
-		return ret;
- 	}
-	
-	public String getUserName() {
-		return userName;
-	}
-
-	public String getKeyTabFile() {
-		return keyTabFile;
-	}
-
-	public String getPassword() {
-		return password;
-	}
-
-	public HadoopClassLoader getClassLoader() {
-		return classLoader;
-	}
-
-	public boolean isKerberosAuthentication() {
-		return isKerberosAuth;
-	}
-
-  
-	
-
-}
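
For orientation: the init() parser above expects datasource.properties keys of the form
<datasource>.<resource>.<property>=<value>, where the <resource> segment is expanded to
"<resource>.xml" before the property is filed under that resource. A minimal sketch, with a
hypothetical datasource name "dev-hdfs" and placeholder values:

    # datasource.properties -- hypothetical entries for illustration only
    dev-hdfs.core-site.hadoop.security.authentication=kerberos
    dev-hdfs.core-site.fs.default.name=hdfs://namenode.example.com:8020

A caller would then obtain the cached holder and read the resolved settings:

    // hypothetical caller; "dev-hdfs" must match a datasource configured as above
    HadoopConfigHolder cfg = HadoopConfigHolder.getInstance("dev-hdfs");
    if (cfg.isKerberosAuthentication()) {
        // username/keytab come from the xalogin section or hadoop-login.properties, if present
        System.out.println("login user: " + cfg.getUserName() + ", keytab: " + cfg.getKeyTabFile());
    }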

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java b/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
deleted file mode 100644
index 3cb838e..0000000
--- a/lookup-client/src/main/java/com/xasecure/hadoop/client/exceptions/HadoopException.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hadoop.client.exceptions;
-
-import java.util.HashMap;
-
-public class HadoopException extends RuntimeException {
-
-	private static final long serialVersionUID = 8872734935128535649L;
-	
-	public HashMap<String, Object> responseData;
-
-	public HadoopException() {
-		super();
-	}
-
-	public HadoopException(String message, Throwable cause) {
-		super(message, cause);
-	}
-
-	public HadoopException(String message) {
-		super(message);
-	}
-
-	public HadoopException(Throwable cause) {
-		super(cause);
-	}
-
-	public void generateResponseDataMap(boolean connectivityStatus,
-			String message, String description, Long objectId, String fieldName) {
-		responseData = new HashMap<String, Object>();
-		responseData.put("connectivityStatus", connectivityStatus);
-		responseData.put("message", message);
-		responseData.put("description", description);
-		responseData.put("objectId", objectId);
-		responseData.put("fieldName", fieldName);
-	}
-
-}
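
The lookup clients below all use this exception the same way: wrap the underlying failure,
attach a response map via generateResponseDataMap(), and rethrow so the caller can surface
the fields. A minimal sketch of that pattern (the catch site itself is hypothetical):

    try {
        // ... some lookup call that may fail ...
    } catch (Exception e) {
        HadoopException he = new HadoopException("Lookup failed", e);
        he.generateResponseDataMap(false, e.getMessage(), "Lookup failed", null, null);
        throw he;  // he.responseData now carries connectivityStatus, message, description, objectId, fieldName
    }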

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
deleted file mode 100644
index c1d18d7..0000000
--- a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClient.java
+++ /dev/null
@@ -1,403 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hbase.client;
-
-import java.io.IOException;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map.Entry;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-
-import com.google.protobuf.ServiceException;
-import com.xasecure.hadoop.client.config.BaseClient;
-import com.xasecure.hadoop.client.exceptions.HadoopException;
-
-public class HBaseClient extends BaseClient {
-
-	private static final Log LOG = LogFactory.getLog(HBaseClient.class) ;
-
-	private Subject subj = null ;
-
-	public HBaseClient(String dataSource) {
-		super(dataSource) ;		
-	}
-
-	public HBaseClient(String dataSource,HashMap<String,String> connectionProp) {
-		super(dataSource, addDefaultHBaseProp(connectionProp)) ;		
-	}
-	
-	//TODO: temporary solution - to be added to the UI for HBase 
-	private static HashMap<String,String> addDefaultHBaseProp(HashMap<String,String> connectionProp) {
-		if (connectionProp != null) {
-			String param = "zookeeper.znode.parent" ;
-			String unsecuredPath = "/hbase-unsecure" ;
-			String authParam = "hadoop.security.authorization" ;
-			
-			String ret = connectionProp.get(param) ;
-			LOG.info("HBase connection has [" + param + "] with value [" + ret + "]");
-			if (ret == null) {
-				ret = connectionProp.get(authParam) ;
-				LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]");
-				if (ret != null && ret.trim().equalsIgnoreCase("false")) {
-					LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]");
-					connectionProp.put(param, unsecuredPath) ;
-				}
-			}
-		}
-		return connectionProp;
-	}
-	
-	public static HashMap<String, Object> testConnection(String dataSource,
-			HashMap<String, String> connectionProperties) {
-
-		HashMap<String, Object> responseData = new HashMap<String, Object>();
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		boolean connectivityStatus = false;
-
-		HBaseClient connectionObj = new HBaseClient(dataSource,
-				connectionProperties);
-		connectivityStatus = connectionObj.getHBaseStatus();
-
-		if (connectivityStatus) {
-			String successMsg = "TestConnection Successful";
-			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-					null, null, responseData);
-		} else {
-			String failureMsg = "Unable to retrieve any databases using given parameters.";
-			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg
-					+ errMsg, null, null, responseData);
-		}
-		return responseData;
-	}
-	
-	public boolean getHBaseStatus() {
-		boolean hbaseStatus = false;
-		subj = getLoginSubject();
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-	
-				hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() {
-					@Override
-					public Boolean run() {
-						Boolean hbaseStatus1 = false;
-						try {
-						    LOG.info("getHBaseStatus: creating default Hbase configuration");
-							Configuration conf = HBaseConfiguration.create() ;					
-							LOG.info("getHBaseStatus: setting config values from client");
-							setClientConfigValues(conf);						
-						    LOG.info("getHBaseStatus: checking HbaseAvailability with the new config");
-							HBaseAdmin.checkHBaseAvailable(conf);					
-						    LOG.info("getHBaseStatus: no exception: HbaseAvailability true");
-							hbaseStatus1 = true;
-						} catch (ZooKeeperConnectionException zce) {
-							String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` "
-									+ "using given config parameters.";
-							HadoopException hdpException = new HadoopException(msgDesc, zce);
-							hdpException.generateResponseDataMap(false, getMessage(zce),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-							
-						} catch (MasterNotRunningException mnre) {
-							String msgDesc = "getHBaseStatus: Looks like `Master` is not running, "
-									+ "so couldn't check that running HBase is available or not, "
-									+ "Please try again later.";
-							HadoopException hdpException = new HadoopException(
-									msgDesc, mnre);
-							hdpException.generateResponseDataMap(false,
-									getMessage(mnre), msgDesc + errMsg,
-									null, null);
-							throw hdpException;
-
-						} catch (ServiceException se) {
-							String msgDesc = "getHBaseStatus: Unable to check availability of "
-									+ "Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-							HadoopException hdpException = new HadoopException(msgDesc, se);
-							hdpException.generateResponseDataMap(false, getMessage(se),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-							
-						} catch(IOException io) {
-							String msgDesc = "getHBaseStatus: Unable to check availability of"
-									+ " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-							HadoopException hdpException = new HadoopException(msgDesc, io);
-							hdpException.generateResponseDataMap(false, getMessage(io),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-							
-						}  catch (Throwable e) {
-							String msgDesc = "getHBaseStatus: Unable to check availability of"
-									+ " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-							LOG.error(msgDesc);
-							hbaseStatus1 = false;
-							HadoopException hdpException = new HadoopException(msgDesc, e);
-							hdpException.generateResponseDataMap(false, getMessage(e),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-						}
-						return hbaseStatus1;
-					}
-				}) ;
-			} catch (SecurityException se) {
-				String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, "
-						+ "current thread might not be able set the context ClassLoader.";
-				HadoopException hdpException = new HadoopException(msgDesc, se);
-				hdpException.generateResponseDataMap(false, getMessage(se),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
-			}
-		} else {
-			LOG.error("getHBaseStatus: secure login not done, subject is null");
-		}
-		
-		return hbaseStatus;
-	}
-	
-	private void setClientConfigValues(Configuration conf) {
-		if (this.connectionProperties == null) return;
-		Iterator<Entry<String, String>> i =  this.connectionProperties.entrySet().iterator();
-		while (i.hasNext()) {
-			Entry<String, String> e = i.next();
-			String v = conf.get(e.getKey());
-			if (v != null && !v.equalsIgnoreCase(e.getValue())) {
-				conf.set(e.getKey(), e.getValue());
-			}
-		}		
-	}
-
-	public List<String> getTableList(final String tableNameMatching) {
-		List<String> ret = null ;
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		
-		subj = getLoginSubject();
-		
-		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-	
-				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
-		
-					@Override
-					public List<String> run() {
-						
-						List<String> tableList = new ArrayList<String>() ;
-						HBaseAdmin admin = null ;
-						try {
-							
-							Configuration conf = HBaseConfiguration.create() ;
-							admin = new HBaseAdmin(conf) ;
-							for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
-								tableList.add(htd.getNameAsString()) ;
-							}
-						} catch (ZooKeeperConnectionException zce) {
-							String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
-									+ "using given config parameters.";
-							HadoopException hdpException = new HadoopException(msgDesc, zce);
-							hdpException.generateResponseDataMap(false, getMessage(zce),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-							
-						} catch (MasterNotRunningException mnre) {
-							String msgDesc = "getTableList: Looks like `Master` is not running, "
-									+ "so couldn't check that running HBase is available or not, "
-									+ "Please try again later.";
-							HadoopException hdpException = new HadoopException(
-									msgDesc, mnre);
-							hdpException.generateResponseDataMap(false,
-									getMessage(mnre), msgDesc + errMsg,
-									null, null);
-							throw hdpException;
-
-						}  catch(IOException io) {
-							String msgDesc = "Unable to get HBase table List for [repository:"
-									+ getConfigHolder().getDatasourceName() + ",table-match:" 
-									+ tableNameMatching + "].";
-							HadoopException hdpException = new HadoopException(msgDesc, io);
-							hdpException.generateResponseDataMap(false, getMessage(io),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-						}   catch (Throwable e) {
-							String msgDesc = "Unable to get HBase table List for [repository:"
-									+ getConfigHolder().getDatasourceName() + ",table-match:" 
-									+ tableNameMatching + "].";
-							LOG.error(msgDesc);
-							HadoopException hdpException = new HadoopException(msgDesc, e);
-							hdpException.generateResponseDataMap(false, getMessage(e),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-						}
-						finally {
-							if (admin != null) {
-								try {
-									admin.close() ;
-								} catch (IOException e) {
-									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
-								}
-							}
-						}
-						return tableList ;
-					}
-					
-				}) ;
-			}
-			finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
-			}
-		}
-		return ret ;
-	}
-	
-	
-	public List<String> getColumnFamilyList(final String tableName, final String columnFamilyMatching) {
-		List<String> ret = null ;
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		
-		subj = getLoginSubject();
-		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-				
-				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
-		
-					@Override
-					public List<String> run() {
-						
-						List<String> colfList = new ArrayList<String>() ;
-						HBaseAdmin admin = null ;
-						try {
-							Configuration conf = HBaseConfiguration.create();
-							admin = new HBaseAdmin(conf) ;
-							HTableDescriptor htd = admin.getTableDescriptor(tableName.getBytes()) ;
-							if (htd != null) {
-								for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
-									String colf = hcd.getNameAsString() ;
-									if (colf.matches(columnFamilyMatching)) {
-										if (!colfList.contains(colf)) {
-											colfList.add(colf) ;
-										}
-									}
-								}
-							}
-						}  catch (ZooKeeperConnectionException zce) {
-							String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` "
-									+ "using given config parameters.";
-							HadoopException hdpException = new HadoopException(msgDesc, zce);
-							hdpException.generateResponseDataMap(false, getMessage(zce),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-							
-						} catch (MasterNotRunningException mnre) {
-							String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, "
-									+ "so couldn't check that running HBase is available or not, "
-									+ "Please try again later.";
-							HadoopException hdpException = new HadoopException(
-									msgDesc, mnre);
-							hdpException.generateResponseDataMap(false,
-									getMessage(mnre), msgDesc + errMsg,
-									null, null);
-							throw hdpException;
-
-						}  catch(IOException io) {
-							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-									+ ", table-match:" + columnFamilyMatching + "], "
-									+ "current thread might not be able set the context ClassLoader.";
-							HadoopException hdpException = new HadoopException(msgDesc, io);
-							hdpException.generateResponseDataMap(false, getMessage(io),
-									msgDesc + errMsg, null, null);
-							throw hdpException; 
-						} catch (SecurityException se) {
-								String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-										+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-										+ ", table-match:" + columnFamilyMatching + "], "
-										+ "current thread might not be able set the context ClassLoader.";
-								HadoopException hdpException = new HadoopException(msgDesc, se);
-								hdpException.generateResponseDataMap(false, getMessage(se),
-										msgDesc + errMsg, null, null);
-								throw hdpException;							
-							
-						}  catch (Throwable e) {
-							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-									+ ", table-match:" + columnFamilyMatching + "], "
-									+ "current thread might not be able set the context ClassLoader.";
-							LOG.error(msgDesc);
-							HadoopException hdpException = new HadoopException(msgDesc, e);
-							hdpException.generateResponseDataMap(false, getMessage(e),
-									msgDesc + errMsg, null, null);
-							throw hdpException;
-						}
-						finally {
-							if (admin != null) {
-								try {
-									admin.close() ;
-								} catch (IOException e) {
-									LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
-								}
-							}
-						}
-						return colfList ;
-					}
-					
-				}) ;
-			} catch (SecurityException se) {
-				String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, "
-						+ "current thread might not be able set the context ClassLoader.";
-				HadoopException hdpException = new HadoopException(msgDesc, se);
-				hdpException.generateResponseDataMap(false, getMessage(se),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
-			}
-		}
-		return ret ;
-	}
-}
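
As a usage sketch of the client above (the datasource name "dev-hbase" and the property
values are hypothetical, java.util imports are assumed, and the shape of the testConnection()
result map is assumed to follow generateResponseDataMap()):

    HashMap<String, String> props = new HashMap<String, String>();
    // with authorization off and no explicit znode parent, addDefaultHBaseProp()
    // above falls back to the /hbase-unsecure znode
    props.put("hadoop.security.authorization", "false");

    HashMap<String, Object> result = HBaseClient.testConnection("dev-hbase", props);
    if (Boolean.TRUE.equals(result.get("connectivityStatus"))) {
        List<String> tables = new HBaseClient("dev-hbase", props).getTableList(".*");
    }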

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java b/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
deleted file mode 100644
index 84f06b6..0000000
--- a/lookup-client/src/main/java/com/xasecure/hbase/client/HBaseClientTester.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hbase.client;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class HBaseClientTester {
-
-	private static final Log LOG = LogFactory.getLog(HBaseClientTester.class) ;
-
-	public static void main(String[] args) throws Throwable {
-
-		HBaseClient hc = null;
-
-		if (args.length <= 2) {
-			System.err.println("USAGE: java " + HBaseClientTester.class.getName() + " dataSourceName propertyFile <tableName> <columnFamilyName>");
-			System.exit(1);
-		}
-		
-		LOG.info("Starting ...");
-
-		Properties conf = new Properties();
-		
-		conf.load(HBaseClientTester.class.getClassLoader().getResourceAsStream(args[1]));
-
-		HashMap<String, String> prop = new HashMap<String, String>();
-		for (Object key : conf.keySet()) {
-			Object val = conf.get(key);
-			prop.put((String) key, (String) val);
-		}
-
-		hc = new HBaseClient(args[0], prop);
-
-		if (args.length == 3) {
-			List<String> dbList = hc.getTableList(args[2]);
-			if (dbList.size() == 0) {
-				System.out.println("No tables found with db filter [" + args[2] + "]");
-			} else {
-				for (String str : dbList) {
-					System.out.println("table: " + str);
-				}
-			}
-		} else if (args.length == 4) {
-			List<String> tableList = hc.getColumnFamilyList(args[2], args[3]);
-			if (tableList.size() == 0) {
-				System.out.println("No column families found under table [" + args[2] + "] with columnfamily filter [" + args[3] + "]");
-			} else {
-				for (String str : tableList) {
-					System.out.println("ColumnFamily: " + str);
-				}
-			}
-		}
-
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
deleted file mode 100644
index ce586a5..0000000
--- a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClient.java
+++ /dev/null
@@ -1,511 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hive.client;
-
-import java.io.Closeable;
-import java.security.PrivilegedAction;
-import java.sql.Connection;
-import java.sql.Driver;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.SQLTimeoutException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.xasecure.hadoop.client.config.BaseClient;
-import com.xasecure.hadoop.client.exceptions.HadoopException;
-
-public class HiveClient extends BaseClient implements Closeable {
-
-	private static final Log LOG = LogFactory.getLog(HiveClient.class) ;
-	
-	Connection con = null ;
-	boolean isKerberosAuth=false;
-	
-
-	public HiveClient(String dataSource) {
-		super(dataSource) ;
-		initHive() ;
-	}
-	
-	public HiveClient(String dataSource,HashMap<String,String> connectionProp) {
-		super(dataSource,connectionProp) ;
-		initHive() ;
-	}
-	
-	public void initHive() {
-		isKerberosAuth = getConfigHolder().isKerberosAuthentication();
-		if (isKerberosAuth) {
-			LOG.info("Secured Mode: JDBC Connection done with preAuthenticated Subject");
-			Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
-				public Object run() {
-					initConnection();
-					return null;
-				}
-			}) ;				
-		}
-		else {
-			LOG.info("Since Password is NOT provided, Trying to use UnSecure client with username and password");
-			final String userName = getConfigHolder().getUserName() ;
-			final String password = getConfigHolder().getPassword() ;
-			Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
-				public Object run() {
-					initConnection(userName,password);
-					return null;
-				}
-			}) ;	
-		}
-	}
-	
-	public List<String> getDatabaseList(String databaseMatching){
-	 	final String dbMatching=databaseMatching;
-		List<String> dblist = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
-			public List<String>  run() {
-				return getDBList(dbMatching);
-			}
-		}) ;
-		return dblist;
-	}
-		
-	private List<String> getDBList(String databaseMatching) {
-		List<String> ret = new ArrayList<String>() ;
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (con != null) {
-			Statement stat =  null ;
-			ResultSet rs = null ;
-			String sql = "show databases" ;
-			if (databaseMatching != null && ! databaseMatching.isEmpty()) {
-				sql = sql + " like \"" + databaseMatching  + "\"" ;
-			}
-			try {
-				stat =  con.createStatement()  ;
-				rs = stat.executeQuery(sql) ;
-				while (rs.next()) {
-					ret.add(rs.getString(1)) ;
-				}
-			} catch (SQLTimeoutException sqlt) {
-				String msgDesc = "Time Out, Unable to execute SQL [" + sql
-						+ "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqlt);
-				hdpException.generateResponseDataMap(false, getMessage(sqlt),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (SQLException sqle) {
-				String msgDesc = "Unable to execute SQL [" + sql + "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqle);
-				hdpException.generateResponseDataMap(false, getMessage(sqle),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} finally {
-				close(rs) ;
-				close(stat) ;
-			}
-			
-		}
-		return ret ;
-	}
-	
-	public List<String> getTableList(String database, String tableNameMatching){
-		final String db=database;
-		final String tblNameMatching=tableNameMatching;
-		List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
-			public List<String>  run() {
-				return getTblList(db,tblNameMatching);
-			}
-		}) ;
-		return tableList;
-	}
-
-	public List<String> getTblList(String database, String tableNameMatching) {
-		List<String> ret = new ArrayList<String>() ;
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (con != null) {
-			Statement stat =  null ;
-			ResultSet rs = null ;
-			
-			String sql = null ;
-			
-			try {
-				sql = "use " + database;
-				
-				try {
-					stat = con.createStatement() ;
-					stat.execute(sql) ;
-				}
-				finally {
-					close(stat) ;
-				}
-				
-				sql = "show tables " ;
-				if (tableNameMatching != null && ! tableNameMatching.isEmpty()) {
-					sql = sql + " like \"" + tableNameMatching  + "\"" ;
-				}
-				stat =  con.createStatement()  ;
-				rs = stat.executeQuery(sql) ;
-				while (rs.next()) {
-					ret.add(rs.getString(1)) ;
-				}
-			} catch (SQLTimeoutException sqlt) {
-				String msgDesc = "Time Out, Unable to execute SQL [" + sql
-						+ "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqlt);
-				hdpException.generateResponseDataMap(false, getMessage(sqlt),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (SQLException sqle) {
-				String msgDesc = "Unable to execute SQL [" + sql + "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqle);
-				hdpException.generateResponseDataMap(false, getMessage(sqle),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} finally {
-				close(rs) ;
-				close(stat) ;
-			}
-			
-		}
-		return ret ;
-	}
-
-	public List<String> getViewList(String database, String viewNameMatching) {
-		// not implemented: view lookup is not supported by this client yet
-		return null ;
-	}
-
-	public List<String> getUDFList(String database, String udfMatching) {
-		// not implemented: UDF lookup is not supported by this client yet
-		return null ;
-	}
-	
-	public List<String> getColumnList(String database, String tableName, String columnNameMatching) {
-		final String db=database;
-		final String tblName=tableName;
-		final String clmNameMatching=columnNameMatching;
-		List<String> columnList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
-			public List<String>  run() {
-					return getClmList(db,tblName,clmNameMatching);
-				}
-			}) ;
-		return columnList;
-	}
-	
-	public List<String> getClmList(String database, String tableName, String columnNameMatching) {
-		List<String> ret = new ArrayList<String>() ;
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (con != null) {
-			
-			String columnNameMatchingRegEx = null ;
-			
-			if (columnNameMatching != null && ! columnNameMatching.isEmpty()) {
-				columnNameMatchingRegEx = columnNameMatching ;
-			}
-			
-			Statement stat =  null ;
-			ResultSet rs = null ;
-			
-			String sql = null ;
-			
-			try {
-				sql = "use " + database;
-				
-				try {
-					stat = con.createStatement() ;
-					stat.execute(sql) ;
-				}
-				finally {
-					close(stat) ;
-				}
-				
-				sql = "describe  " + tableName ;
-				stat =  con.createStatement()  ;
-				rs = stat.executeQuery(sql) ;
-				while (rs.next()) {
-					String columnName = rs.getString(1) ;
-					if (columnNameMatchingRegEx == null) {
-						ret.add(columnName) ;
-					}
-					else if (FilenameUtils.wildcardMatch(columnName,columnNameMatchingRegEx)) {
-						ret.add(columnName) ;
-					}
-				}
-			} catch (SQLTimeoutException sqlt) {
-				String msgDesc = "Time Out, Unable to execute SQL [" + sql
-						+ "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqlt);
-				hdpException.generateResponseDataMap(false, getMessage(sqlt),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (SQLException sqle) {
-				String msgDesc = "Unable to execute SQL [" + sql + "].";
-				HadoopException hdpException = new HadoopException(msgDesc,
-						sqle);
-				hdpException.generateResponseDataMap(false, getMessage(sqle),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} finally {
-				close(rs) ;
-				close(stat) ;
-			}
-			
-		}
-		return ret ;
-	}
-	
-	
-	public void close() {
-		Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>(){
-			public Void run() {
-				close(con) ;
-				return null;
-			}
-		});
-	}
-	
-	private void close(Statement aStat) {
-		try {
-			if (aStat != null) {
-				aStat.close();
-			}
-		} catch (SQLException e) {
-			LOG.error("Unable to close SQL statement", e);
-		}
-	}
-
-	private void close(ResultSet aResultSet) {
-		try {
-			if (aResultSet != null) {
-				aResultSet.close();
-			}
-		} catch (SQLException e) {
-			LOG.error("Unable to close ResultSet", e);
-		}
-	}
-
-	private void close(Connection aCon) {
-		try {
-			if (aCon != null) {
-				aCon.close();
-			}
-		} catch (SQLException e) {
-			LOG.error("Unable to close SQL Connection", e);
-		}
-	}
-
-	private void initConnection() {
-		initConnection(null,null) ;
-	}
-
-	
-	private void initConnection(String userName, String password) {
-	
-		Properties prop = getConfigHolder().getXASecureSection() ;
-		String driverClassName = prop.getProperty("jdbc.driverClassName") ;
-		String url =  prop.getProperty("jdbc.url") ;	
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-	
-		if (driverClassName != null) {
-			try {
-				Driver driver = (Driver)Class.forName(driverClassName).newInstance() ;
-				DriverManager.registerDriver(driver);
-			} catch (SQLException e) {
-				String msgDesc = "initConnection: Caught SQLException while registering "
-						+ "Hive driver, so Unable to connect to Hive Thrift Server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, e);
-				hdpException.generateResponseDataMap(false, getMessage(e),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (IllegalAccessException ilae) {
-				String msgDesc = "initConnection: Class or its nullary constructor might not be accessible, "
-						+ "so unable to initiate connection to the Hive Thrift Server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, ilae);
-				hdpException.generateResponseDataMap(false, getMessage(ilae),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (InstantiationException ie) {
-				String msgDesc = "initConnection: Class may not have a nullary constructor, or "
-						+ "instantiation failed for some other reason, "
-						+ "so unable to initiate connection to the Hive Thrift Server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, ie);
-				hdpException.generateResponseDataMap(false, getMessage(ie),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-
-			} catch (ExceptionInInitializerError eie) {
-				String msgDesc = "initConnection: Got ExceptionInInitializerError; "
-						+ "the initialization provoked by this method failed, "
-						+ "so unable to initiate connection to the Hive Thrift Server instance.";
-				HadoopException hdpException = new HadoopException(msgDesc, eie);
-				hdpException.generateResponseDataMap(false, getMessage(eie),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (SecurityException se) {
-				String msgDesc = "initConnection: unable to initiate connection to the Hive Thrift Server instance; "
-						+ "the caller's class loader is not the same as, or an ancestor "
-						+ "of, the class loader for the current class, and invocation of "
-						+ "s.checkPackageAccess() denies access to the package of this class.";
-				HadoopException hdpException = new HadoopException(msgDesc, se);
-				hdpException.generateResponseDataMap(false, getMessage(se),
-						msgDesc + errMsg, null, null);
-				throw hdpException;
-			} catch (Throwable t) {
-				String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
-						+ "please provide valid value of field : {jdbc.driverClassName}.";
-				HadoopException hdpException = new HadoopException(msgDesc, t);
-				hdpException.generateResponseDataMap(false, getMessage(t),
-						msgDesc + errMsg, null, "jdbc.driverClassName");
-				throw hdpException;
-			}
-		}
-		
-		try {
-			
-			if (userName == null && password == null) {
-				con = DriverManager.getConnection(url) ;
-			}
-			else {			
-				con = DriverManager.getConnection(url, userName, password) ;
-			}
-		
-		} catch (SQLException e) {
-			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-			HadoopException hdpException = new HadoopException(msgDesc, e);
-			hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
-					+ errMsg, null, null);
-			throw hdpException;
-		} catch (SecurityException se) {
-			String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-			HadoopException hdpException = new HadoopException(msgDesc, se);
-			hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
-					+ errMsg, null, null);
-			throw hdpException;
-		}
-	}
-
-	
-	public static void main(String[] args) {
-		
-		HiveClient hc = null ;
-		
-		if (args.length == 0) {
-			System.err.println("USAGE: java " + HiveClient.class.getName() + " dataSourceName <databaseName> <tableName> <columnName>") ;
-			System.exit(1) ;
-		}
-		
-		try {
-			hc = new HiveClient(args[0]) ;
-			
-			if (args.length == 2) {
-				List<String> dbList = hc.getDatabaseList(args[1]) ;
-				if (dbList.size() == 0) {
-					System.out.println("No database found with db filter [" + args[1] + "]") ;
-				}
-				else {
-					for (String str : dbList ) {
-						System.out.println("database: " + str ) ;
-					}
-				}
-			}
-			else if (args.length == 3) {
-				List<String> tableList = hc.getTableList(args[1], args[2]) ;
-				if (tableList.size() == 0) {
-					System.out.println("No tables found under database[" + args[1] + "] with table filter [" + args[2] + "]") ;
-				}
-				else {
-					for(String str : tableList) {
-						System.out.println("Table: " + str) ;
-					}
-				}
-			}
-			else if (args.length == 4) {
-				List<String> columnList = hc.getColumnList(args[1], args[2], args[3]) ;
-				if (columnList.size() == 0) {
-					System.out.println("No columns found for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + args[3] + "]") ;
-				}
-				else {
-					for (String str : columnList ) {
-						System.out.println("Column: " + str) ;
-					}
-				}
-			}
-			
-		}
-		finally {
-			if (hc != null) {
-				hc.close();
-			}
-		}	
-	}
-
-	public static HashMap<String, Object> testConnection(String dataSource,
-			HashMap<String, String> connectionProperties) {
-
-		HashMap<String, Object> responseData = new HashMap<String, Object>();
-		boolean connectivityStatus = false;
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-
-		HiveClient connectionObj = new HiveClient(dataSource,
-				connectionProperties);
-		List<String> testResult = connectionObj.getDatabaseList("*");
-		if (testResult != null && testResult.size() != 0) {
-			connectivityStatus = true;
-		}
-		if (connectivityStatus) {
-			String successMsg = "TestConnection Successful";
-			generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-					null, null, responseData);
-		} else {
-			String failureMsg = "Unable to retrieve any databases using given parameters.";
-			generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
-					null, null, responseData);
-		}
-		
-		connectionObj.close();
-		return responseData;
-	}
-	
-}
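
initConnection() above pulls its JDBC settings from the xalogin section, so a repository's
connection properties minimally need a driver class and URL; username and password are used
when Kerberos is not in effect. A sketch with placeholder values (org.apache.hive.jdbc.HiveDriver
is the standard HiveServer2 driver class; the host and credentials are hypothetical):

    jdbc.driverClassName=org.apache.hive.jdbc.HiveDriver
    jdbc.url=jdbc:hive2://hiveserver.example.com:10000/default
    username=hiveuser
    password=hivepassword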

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java b/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
deleted file mode 100644
index bf390f5..0000000
--- a/lookup-client/src/main/java/com/xasecure/hive/client/HiveClientTester.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.hive.client;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-public class HiveClientTester  {
-
-	public static void main(String[] args) throws Throwable {
-		
-		HiveClient hc = null ;
-		
-		if (args.length <= 2) {
-			System.err.println("USAGE: java " + HiveClientTester.class.getName() + " dataSourceName propertyFile <databaseName> <tableName> <columnName>") ;
-			System.exit(1) ;
-		}
-		
-		
-		try {
-			
-			Properties conf = new Properties() ;
-			conf.load(HiveClientTester.class.getClassLoader().getResourceAsStream(args[1]));
-			
-			HashMap<String,String> prop = new HashMap<String,String>() ;
-			for(Object key : conf.keySet()) {
-				Object val = conf.get(key) ;
-				prop.put((String)key, (String)val) ;
-			}
-
-			
-			hc = new HiveClient(args[0], prop) ;
-			
-			
-			if (args.length == 3) {
-				List<String> dbList = hc.getDatabaseList(args[2]) ;
-				if (dbList.size() == 0) {
-					System.out.println("No database found with db filter [" + args[2] + "]") ;
-				}
-				else {
-					for (String str : dbList ) {
-						System.out.println("database: " + str ) ;
-					}
-				}
-			}
-			else if (args.length == 4) {
-				List<String> tableList = hc.getTableList(args[2], args[3]) ;
-				if (tableList.size() == 0) {
-					System.out.println("No tables found under database[" + args[2] + "] with table filter [" + args[3] + "]") ;
-				}
-				else {
-					for(String str : tableList) {
-						System.out.println("Table: " + str) ;
-					}
-				}
-			}
-			else if (args.length == 5) {
-				List<String> columnList = hc.getColumnList(args[2], args[3], args[4]) ;
-				if (columnList.size() == 0) {
-					System.out.println("No columns found for db:" + args[2] + ", table: [" + args[3] + "], with column filter [" + args[4] + "]") ;
-				}
-				else {
-					for (String str : columnList ) {
-						System.out.println("Column: " + str) ;
-					}
-				}
-			}
-			
-		}
-		finally {
-			if (hc != null) {
-				hc.close();
-			}
-		}
-		
-	}
-	
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java b/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
deleted file mode 100644
index e9825de..0000000
--- a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClient.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.xasecure.knox.client;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
-import com.xasecure.hadoop.client.config.BaseClient;
-import com.xasecure.hadoop.client.exceptions.HadoopException;
-
-public class KnoxClient {
-
-	private static final String EXPECTED_MIME_TYPE = "application/json";
-	private static final Log LOG = LogFactory.getLog(KnoxClient.class);
-
-	private String knoxUrl;
-	private String userName;
-	private String password;
-	
-	/*
-   Sample curl calls to Knox to discover topologies
-	 curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
-	 curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
-	*/
-	
-	public KnoxClient(String knoxUrl, String userName, String password) {
-		LOG.debug("Constructed KnoxClient with knoxUrl: " + knoxUrl +
-				", userName: " + userName);
-		this.knoxUrl = knoxUrl;
-		this.userName = userName;
-		this.password = password;
-	}
-
-	public List<String> getTopologyList(String topologyNameMatching) {
-		
-		// sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies
-		LOG.debug("Getting Knox topology list for topologyNameMatching : " +
-				topologyNameMatching);
-		List<String> topologyList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if ( topologyNameMatching == null ||  topologyNameMatching.trim().isEmpty()) {
-			topologyNameMatching = "";
-		}
-		try {
-
-			Client client = null;
-			ClientResponse response = null;
-
-			try {
-				client = Client.create();
-				
-				client.addFilter(new HTTPBasicAuthFilter(userName, password));
-				WebResource webResource = client.resource(knoxUrl);
-				response = webResource.accept(EXPECTED_MIME_TYPE)
-					    .get(ClientResponse.class);
-				LOG.debug("Knox topology list response: " + response);
-				if (response != null) {
-
-					if (response.getStatus() == 200) {
-						String jsonString = response.getEntity(String.class);
-						LOG.debug("Knox topology list response JSON string: "+ jsonString);
-						
-						ObjectMapper objectMapper = new ObjectMapper();
-						
-						JsonNode rootNode = objectMapper.readTree(jsonString);
-						JsonNode topologyNode = rootNode.findValue("topology");
-						if (topologyNode == null) {
-							return topologyList;
-						}
-						Iterator<JsonNode> elements = topologyNode.getElements();
-						while (elements.hasNext()) {
-							JsonNode element = elements.next();
-							String topologyName = element.get("name").getValueAsText();
-							LOG.debug("Found Knox topologyName: " + topologyName);
-							if (topologyName.startsWith(topologyNameMatching)) {
-								topologyList.add(topologyName);
-							}
-						}
-					} else {
-						LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
-					}
-
-				} else {
-					String msgDesc = "Unable to get a valid response for "
-							+ "getTopologyList() call for KnoxUrl : [" + knoxUrl
-							+ "] - got null response.";
-					LOG.error(msgDesc);
-					HadoopException hdpException = new HadoopException(msgDesc);
-					hdpException.generateResponseDataMap(false, msgDesc,
-							msgDesc + errMsg, null, null);
-					throw hdpException;
-				}
-
-			} finally {
-				if (response != null) {
-					response.close();
-				}
-				if (client != null) {
-					client.destroy();
-				}
-			}
-		} catch (HadoopException he) {
-			throw he;
-		} catch (Throwable t) {
-			String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
-			HadoopException hdpException = new HadoopException(msgDesc, t);
-			LOG.error(msgDesc, t);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(t), msgDesc + errMsg, null, null);
-			throw hdpException;
-		}
-		return topologyList;
-	}
-
-	
-	public List<String> getServiceList(String topologyName, String serviceNameMatching) {
-		
-		// sample URI: .../admin/api/v1/topologies/<topologyName>
-		
-		List<String> serviceList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if ( serviceNameMatching == null ||  serviceNameMatching.trim().isEmpty()) {
-			serviceNameMatching = "";
-		}
-		try {
-
-			Client client = null;
-			ClientResponse response = null;
-
-			try {
-				client = Client.create();
-				
-				client.addFilter(new HTTPBasicAuthFilter(userName, password));
-				
-				WebResource webResource = client.resource(knoxUrl + "/" + topologyName);
-				
-				response = webResource.accept(EXPECTED_MIME_TYPE)
-					    .get(ClientResponse.class);
-				LOG.debug("Knox service lookup response: " + response);
-				if (response != null) {
-					
-					if (response.getStatus() == 200) {
-						String jsonString = response.getEntity(String.class);
-						LOG.debug("Knox service look up response JSON string: " + jsonString);
-						
-						ObjectMapper objectMapper = new ObjectMapper();
-						
-						JsonNode rootNode = objectMapper.readTree(jsonString);
-						JsonNode topologyNode = rootNode.findValue("topology");
-						JsonNode servicesNode = topologyNode.get("services");
-						Iterator<JsonNode> services = servicesNode.getElements();
-						while (services.hasNext()) {
-							JsonNode service = services.next();
-							String serviceName = service.get("role").getValueAsText();
-							LOG.debug("Knox serviceName: " + serviceName);
-							if (serviceName.startsWith(serviceNameMatching)) {
-								serviceList.add(serviceName);
-							}
-						}
-					} else {
-						LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
-					}
-
-				} else {
-					String msgDesc = "Unable to get a valid response for "
-							+ "getServiceList() call for KnoxUrl : [" + knoxUrl
-							+ "] - got null response.";
-					LOG.error(msgDesc);
-					HadoopException hdpException = new HadoopException(msgDesc);
-					hdpException.generateResponseDataMap(false, msgDesc,
-							msgDesc + errMsg, null, null);
-					throw hdpException;
-				}
-
-			} finally {
-				if (response != null) {
-					response.close();
-				}
-				if (client != null) {
-					client.destroy();
-				}
-			}
-		} catch (HadoopException he) {
-			throw he;
-		} catch (Throwable t) {
-			String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
-			HadoopException hdpException = new HadoopException(msgDesc, t);
-			LOG.error(msgDesc, t);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(t), msgDesc + errMsg, null, null);
-			throw hdpException;
-
-		}
-		return serviceList;
-	}
-
-	public static void main(String[] args) {
-
-		KnoxClient knoxClient = null;
-
-		if (args.length != 3) {
-			System.err.println("USAGE: java " + KnoxClient.class.getName()
-					+ " knoxUrl userName password");
-			System.exit(1);
-		}
-
-		knoxClient = new KnoxClient(args[0], args[1], args[2]);
-		List<String> topologyList = knoxClient.getTopologyList("");
-		if ((topologyList == null) || topologyList.isEmpty()) {
-			System.out.println("No knox topologies found");
-		} else {
-			for (String topology : topologyList) {
-				System.out.println("Found Topology: " + topology);
-				List<String> serviceList = knoxClient.getServiceList(topology, "");
-				if ((serviceList == null) || serviceList.isEmpty()) {
-					System.out.println("No services found for knox topology: " + topology);
-				} else {
-					for (String service : serviceList) {
-						System.out.println("	Found service in topology: " + service + ", " + topology);
-					}
-				}
-			}
-		}
-	}
-	
-	public static HashMap<String, Object> testConnection(String dataSource,
-			HashMap<String, String> connectionProperties) {
-
-		List<String> strList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		boolean connectivityStatus = false;
-		HashMap<String, Object> responseData = new HashMap<String, Object>();
-
-		KnoxClient knoxClient = getKnoxClient(dataSource, connectionProperties);
-		strList = getKnoxResources(knoxClient, "", null);
-
-		if (strList != null && (strList.size() != 0)) {
-			connectivityStatus = true;
-		}
-		
-		if (connectivityStatus) {
-			String successMsg = "TestConnection Successful";
-			BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-					null, null, responseData);
-		} else {
-			String failureMsg = "Unable to retrieve any topologies/services using given parameters.";
-			BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
-					null, null, responseData);
-		}
-		
-		return responseData;
-	}
-
-	public static KnoxClient getKnoxClient(String dataSourceName,
-			Map<String, String> configMap) {
-		KnoxClient knoxClient = null;
-		LOG.debug("Getting knoxClient for datasource: " + dataSourceName
-				+ ", configMap: " + configMap);
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (configMap == null || configMap.isEmpty()) {
-			String msgDesc = "Could not connect as Connection ConfigMap is empty.";
-			LOG.error(msgDesc);
-			HadoopException hdpException = new HadoopException(msgDesc);
-			hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null,
-					null);
-			throw hdpException;
-		} else {
-			String knoxUrl = configMap.get("knox.url");
-			String knoxAdminUser = configMap.get("username");
-			String knoxAdminPassword = configMap.get("password");
-			knoxClient = new KnoxClient(knoxUrl, knoxAdminUser,
-					knoxAdminPassword);
-		}
-		return knoxClient;
-	}
-
-	public static List<String> getKnoxResources(final KnoxClient knoxClient,
-			String topologyName, String serviceName) {
-
-		List<String> resultList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-
-		try {
-			if (knoxClient == null) {
-				String msgDesc = "Unable to get knox resources: knoxClient is null.";
-				LOG.error(msgDesc);
-				HadoopException hdpException = new HadoopException(msgDesc);
-				hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
-						null, null);
-				throw hdpException;
-			}
-
-			final Callable<List<String>> callableObj;
-			if (serviceName != null) {
-				final String finalServiceNameMatching = serviceName.trim();
-				final String finalTopologyName = topologyName;
-				callableObj = new Callable<List<String>>() {
-					@Override
-					public List<String> call() {
-						return knoxClient.getServiceList(finalTopologyName,
-								finalServiceNameMatching);
-					}
-				};
-
-			} else {
-				final String finalTopologyNameMatching = (topologyName == null) ? ""
-						: topologyName.trim();
-				callableObj = new Callable<List<String>>() {
-					@Override
-					public List<String> call() {
-						return knoxClient
-								.getTopologyList(finalTopologyNameMatching);
-					}
-				};
-			}
-			resultList = timedTask(callableObj, 5, TimeUnit.SECONDS);
-
-		} catch (HadoopException he) {
-			throw he;
-		} catch (Exception e) {
-			String msgDesc = "Unable to get knox resources.";
-			LOG.error(msgDesc, e);
-			HadoopException hdpException = new HadoopException(msgDesc);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(e), msgDesc + errMsg, null, null);
-			throw hdpException;
-		}
-
-		return resultList;
-	}
-
-	public static <T> T timedTask(Callable<T> callableObj, long timeout,
-			TimeUnit timeUnit) throws Exception {
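-		// NOTE: despite the signature, the timeout and timeUnit arguments are
-		// ignored here and the callable runs synchronously on the caller's
-		// thread.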
-		return callableObj.call();
-	}
-
-}
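
As flagged in the comment above, timedTask() currently runs the callable
inline and ignores its timeout arguments. If an enforced bound were wanted,
one possible sketch using java.util.concurrent (an assumption about intent,
not the project's implementation):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;

    public class TimedTask {
        // Run the callable on a worker thread and give up after the timeout;
        // the task is cancelled and TimeoutException propagates to the caller.
        public static <T> T timedTask(Callable<T> callableObj, long timeout,
                TimeUnit timeUnit) throws Exception {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            try {
                Future<T> future = executor.submit(callableObj);
                return future.get(timeout, timeUnit);
            } finally {
                executor.shutdownNow();
            }
        }
    }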

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClientTest.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClientTest.java b/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClientTest.java
deleted file mode 100644
index 3ba3e76..0000000
--- a/lookup-client/src/main/java/com/xasecure/knox/client/KnoxClientTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.xasecure.knox.client;
-
-public class KnoxClientTest  {
-	
-	
-	/*
-   Sample curl calls to knox REST API to discover topologies
-	 curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
-	 curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
-	*/
-	
-	public static void main(String[] args) {
-		System.out.println(System.getProperty("java.class.path"));
-		System.setProperty("javax.net.ssl.trustStore", "/tmp/cacertswithknox");
-		String[] testArgs = {
-				"https://localhost:8443/gateway/admin/api/v1/topologies",
-				"admin",
-				"admin-password"
-				};
-		KnoxClient.main(testArgs);
-	}
-	
-	
-}
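
The test above points the JVM at a custom trust store programmatically; the
same can be done on the command line. The path and password below are
placeholders:

    java -Djavax.net.ssl.trustStore=/tmp/cacertswithknox \
         -Djavax.net.ssl.trustStorePassword=changeit \
         com.xasecure.knox.client.KnoxClientTest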

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/knox/client/XaSecureConstants.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/knox/client/XaSecureConstants.java b/lookup-client/src/main/java/com/xasecure/knox/client/XaSecureConstants.java
deleted file mode 100644
index 191bebb..0000000
--- a/lookup-client/src/main/java/com/xasecure/knox/client/XaSecureConstants.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.xasecure.knox.client;
-
-public class XaSecureConstants {
-	
-	// xasecure 2-way ssl configuration 
-
-	public static final String XASECURE_KNOX_CLIENT_KEY_FILE 						  = "xasecure.knoxclient.ssl.keystore";	
-	public static final String XASECURE_KNOX_CLIENT_KEY_FILE_PASSWORD				  = "xasecure.knoxclient.ssl.keystore.password";
-	public static final String XASECURE_KNOX_CLIENT_KEY_FILE_TYPE 					  = "xasecure.knoxclient.ssl.keystore.type";	
-
-	public static final String XASECURE_KNOX_CLIENT_KEY_FILE_TYPE_DEFAULT 			  = "jks";	
-
-	public static final String XASECURE_KNOX_CLIENT_TRUSTSTORE_FILE					  = "xasecure.knoxclient.ssl.truststore";	
-	public static final String XASECURE_KNOX_CLIENT_TRUSTSTORE_FILE_PASSWORD		  = "xasecure.knoxclient.ssl.truststore.password";	
-	public static final String XASECURE_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE			  = "xasecure.knoxclient.ssl.truststore.type";	
-
-	public static final String XASECURE_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE_DEFAULT	  = "jks";	
-	
-	
-	public static final String XASECURE_SSL_KEYMANAGER_ALGO_TYPE					  = "SunX509" ;
-	public static final String XASECURE_SSL_TRUSTMANAGER_ALGO_TYPE					  = "SunX509" ;
-	public static final String XASECURE_SSL_CONTEXT_ALGO_TYPE						  = "SSL" ;
-
-}
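
The constants above only name keystore/truststore settings and JSSE
algorithm types. For orientation, a minimal sketch of how such values are
typically wired into an SSLContext (paths and passwords are caller-supplied;
this is standard JSSE usage, not the project's actual wiring):

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.security.KeyStore;
    import javax.net.ssl.KeyManagerFactory;
    import javax.net.ssl.SSLContext;
    import javax.net.ssl.TrustManagerFactory;

    public class SslContextSketch {
        public static SSLContext build(String keyStorePath, char[] keyStorePass,
                String trustStorePath, char[] trustStorePass) throws Exception {
            // Keystore holding the client certificate ("jks" per the defaults above).
            KeyStore keyStore = KeyStore.getInstance("jks");
            try (InputStream in = new FileInputStream(keyStorePath)) {
                keyStore.load(in, keyStorePass);
            }
            KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
            kmf.init(keyStore, keyStorePass);

            // Truststore used to validate the server certificate.
            KeyStore trustStore = KeyStore.getInstance("jks");
            try (InputStream in = new FileInputStream(trustStorePath)) {
                trustStore.load(in, trustStorePass);
            }
            TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
            tmf.init(trustStore);

            // "SSL" matches XASECURE_SSL_CONTEXT_ALGO_TYPE above.
            SSLContext ctx = SSLContext.getInstance("SSL");
            ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
            return ctx;
        }
    }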

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java b/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
deleted file mode 100644
index f1c7449..0000000
--- a/lookup-client/src/main/java/com/xasecure/storm/client/StormClient.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.xasecure.storm.client;
-
-import java.io.IOException;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.security.KrbPasswordSaverLoginModule;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.log4j.Logger;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.xasecure.hadoop.client.config.BaseClient;
-import com.xasecure.hadoop.client.exceptions.HadoopException;
-import com.xasecure.storm.client.json.model.Topology;
-import com.xasecure.storm.client.json.model.TopologyListResponse;
-
-public class StormClient {
-	
-	public static final Logger LOG = Logger.getLogger(StormClient.class) ;
-
-	private static final String EXPECTED_MIME_TYPE = "application/json";
-	
-	private static final String TOPOLOGY_LIST_API_ENDPOINT = "/api/v1/topology/summary" ;
-	
-
-	String stormUIUrl;
-	String userName;
-	String password;
-
-	public StormClient(String aStormUIUrl, String aUserName, String aPassword) {
-		
-		this.stormUIUrl = aStormUIUrl;
-		this.userName = aUserName ;
-		this.password = aPassword;
-		
-		if (LOG.isDebugEnabled()) {
-			LOG.debug("Storm client is built with url [" + aStormUIUrl + "], user: [" + aUserName + "], password: [*****]");
-		}
-
-	}
-
-	public List<String> getTopologyList(final String topologyNameMatching) {
-		
-		LOG.debug("Getting Storm topology list for topologyNameMatching : " +
-				topologyNameMatching);
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		
-		List<String> ret = new ArrayList<String>();
-		
-		PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
-			@Override
-			public ArrayList<String> run() {
-				
-				ArrayList<String> lret = new ArrayList<String>();
-				
-				String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT ;
-				
-				Client client = null ;
-				ClientResponse response = null ;
-				
-				try {
-					client = Client.create() ;
-					
-					WebResource webResource = client.resource(url);
-					
-					response = webResource.accept(EXPECTED_MIME_TYPE)
-						    .get(ClientResponse.class);
-					
-					LOG.debug("getTopologyList():calling " + url);
-					
-					if (response != null) {
-						LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());	
-						if (response.getStatus() == 200) {
-							String jsonString = response.getEntity(String.class);
-							Gson gson = new GsonBuilder().setPrettyPrinting().create();
-							TopologyListResponse topologyListResponse = gson.fromJson(jsonString, TopologyListResponse.class);
-							if (topologyListResponse != null) {
-								if (topologyListResponse.getTopologyList() != null) {
-									for(Topology topology : topologyListResponse.getTopologyList()) {
-										String topologyName = topology.getName() ;
-										LOG.debug("getTopologyList():Found topology " + topologyName);
-										LOG.debug("getTopologyList():topology Name=[" + topology.getName() + "], topologyNameMatching=[" + topologyNameMatching + "]");
-										if (topologyName != null) {
-											if (topologyNameMatching == null || topologyNameMatching.isEmpty() || FilenameUtils.wildcardMatch(topology.getName(), topologyNameMatching + "*")) {
-												LOG.debug("getTopologyList():Adding topology " + topologyName);
-												lret.add(topologyName) ;
-											}
-										}
-									}
-								}
-							}
-						} else{
-							LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL " + url + ", so returning null list");	
-							String jsonString = response.getEntity(String.class);
-							LOG.info(jsonString);
-							lret = null;
-						}
-					} else {
-						String msgDesc = "Unable to get a valid response for "
-								+ "expected mime type : [" + EXPECTED_MIME_TYPE
-								+ "] URL : " + url + " - got null response.";
-						LOG.error(msgDesc);
-						HadoopException hdpException = new HadoopException(msgDesc);
-						hdpException.generateResponseDataMap(false, msgDesc,
-								msgDesc + errMsg, null, null);
-						throw hdpException;
-					}
-				} catch (HadoopException he) {
-					throw he;
-				} catch (Throwable t) {
-					String msgDesc = "Exception while getting Storm TopologyList."
-							+ " URL : " + url;
-					HadoopException hdpException = new HadoopException(msgDesc,
-							t);
-					LOG.error(msgDesc, t);
-
-					hdpException.generateResponseDataMap(false,
-							BaseClient.getMessage(t), msgDesc + errMsg, null,
-							null);
-					throw hdpException;
-					
-				} finally {
-					if (response != null) {
-						response.close();
-					}
-					
-					if (client != null) {
-						client.destroy(); 
-					}
-				
-				}
-				return lret ;
-			}
-		} ;
-		
-		try {
-			ret = executeUnderKerberos(this.userName, this.password, topologyListGetter) ;
-		} catch (IOException e) {
-			LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e) ;
-		}
-		
-		return ret;
-	}
-	
-	public static <T> T executeUnderKerberos(String userName, String password,
-			PrivilegedAction<T> action) throws IOException {
-		
-		final String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		class MySecureClientLoginConfiguration extends
-				javax.security.auth.login.Configuration {
-
-			private String userName;
-			private String password ;
-
-			MySecureClientLoginConfiguration(String aUserName,
-					String password) {
-				this.userName = aUserName;
-				this.password = password;
-			}
-
-			@Override
-			public AppConfigurationEntry[] getAppConfigurationEntry(
-					String appName) {
-
-				Map<String, String> kerberosOptions = new HashMap<String, String>();
-				kerberosOptions.put("principal", this.userName);
-				kerberosOptions.put("debug", "true");
-				kerberosOptions.put("useKeyTab", "false");
-				kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, this.userName);
-				kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, this.password);
-				kerberosOptions.put("doNotPrompt", "false");
-				kerberosOptions.put("useFirstPass", "true");
-				kerberosOptions.put("tryFirstPass", "false");
-				kerberosOptions.put("storeKey", "true");
-				kerberosOptions.put("refreshKrb5Config", "true");
-
-				AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = null;
-				AppConfigurationEntry KERBEROS_PWD_SAVER = null;
-				try {
-					KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
-							KerberosUtil.getKrb5LoginModuleName(),
-							AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-							kerberosOptions);
-					KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
-
-				} catch (IllegalArgumentException e) {
-					String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
-					HadoopException hdpException = new HadoopException(msgDesc,
-							e);
-					LOG.error(msgDesc, e);
-
-					hdpException.generateResponseDataMap(false,
-							BaseClient.getMessage(e), msgDesc + errMsg, null,
-							null);
-					throw hdpException;
-				}
-                
-				LOG.debug("getAppConfigurationEntry():" + kerberosOptions.get("principal"));
-				
-                return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
-			}
-
-		};
-
-		T ret = null;
-
-		Subject subject = null;
-		LoginContext loginContext = null;
-
-		try {
-		    subject = new Subject();
-			LOG.debug("executeUnderKerberos():user=" + userName + ",pass=");
-			LOG.debug("executeUnderKerberos():Creating config..");
-			MySecureClientLoginConfiguration loginConf = new MySecureClientLoginConfiguration(
-					userName, password);
-			LOG.debug("executeUnderKerberos():Creating Context..");
-			loginContext = new LoginContext("hadoop-keytab-kerberos", subject,
-					null, loginConf);
-			
-			LOG.debug("executeUnderKerberos():Logging in..");
-			loginContext.login();
-
-			Subject loginSubj = loginContext.getSubject();
-
-			if (loginSubj != null) {
-				ret = Subject.doAs(loginSubj, action);
-			}
-		} catch (LoginException le) {
-			String msgDesc = "executeUnderKerberos: Login failure using given"
-					+ " configuration parameters, username : `" + userName + "`.";
-			HadoopException hdpException = new HadoopException(msgDesc, le);
-			LOG.error(msgDesc, le);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(le), msgDesc + errMsg, null, null);
-			throw hdpException;
-		} catch (SecurityException se) {
-			String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
-			HadoopException hdpException = new HadoopException(msgDesc, se);
-			LOG.error(msgDesc, se);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(se), msgDesc + errMsg, null, null);
-			throw hdpException;
-
-		} finally {
-			if (loginContext != null) {
-				if (subject != null) {
-					try {
-						loginContext.logout();
-					} catch (LoginException e) {
-						throw new IOException("logout failure", e);
-					}
-				}
-			}
-		}
-
-		return ret;
-	}
-
-	public static HashMap<String, Object> testConnection(String dataSource,
-			HashMap<String, String> connectionProperties) {
-
-		List<String> strList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		boolean connectivityStatus = false;
-		HashMap<String, Object> responseData = new HashMap<String, Object>();
-
-		StormClient stormClient = getStormClient(dataSource,
-				connectionProperties);
-		strList = getStormResources(stormClient, "");
-
-		if (strList != null) {
-			connectivityStatus = true;
-		}
-
-		if (connectivityStatus) {
-			String successMsg = "TestConnection Successful";
-			BaseClient.generateResponseDataMap(connectivityStatus, successMsg,
-					successMsg, null, null, responseData);
-		} else {
-			String failureMsg = "Unable to retrieve any topologies using given parameters.";
-			BaseClient.generateResponseDataMap(connectivityStatus, failureMsg,
-					failureMsg + errMsg, null, null, responseData);
-		}
-
-		return responseData;
-	}
-
-	public static StormClient getStormClient(String dataSourceName,
-			Map<String, String> configMap) {
-		StormClient stormClient = null;
-		LOG.debug("Getting StormClient for datasource: " + dataSourceName
-				+ ", configMap: " + configMap);
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-		if (configMap == null || configMap.isEmpty()) {
-			String msgDesc = "Could not connect as Connection ConfigMap is empty.";
-			LOG.error(msgDesc);
-			HadoopException hdpException = new HadoopException(msgDesc);
-			hdpException.generateResponseDataMap(false, msgDesc, msgDesc
-					+ errMsg, null, null);
-			throw hdpException;
-		} else {
-			String stormUrl = configMap.get("nimbus.url");
-			String stormAdminUser = configMap.get("username");
-			String stormAdminPassword = configMap.get("password");
-			stormClient = new StormClient(stormUrl, stormAdminUser,
-					stormAdminPassword);
-		}
-		return stormClient;
-	}
-
-	public static List<String> getStormResources(final StormClient stormClient,
-			String topologyName) {
-
-		List<String> resultList = new ArrayList<String>();
-		String errMsg = " You can still save the repository and start creating "
-				+ "policies, but you would not be able to use autocomplete for "
-				+ "resource names. Check xa_portal.log for more info.";
-
-		try {
-			if (stormClient == null) {
-				String msgDesc = "Unable to get Storm resources: StormClient is null.";
-				LOG.error(msgDesc);
-				HadoopException hdpException = new HadoopException(msgDesc);
-				hdpException.generateResponseDataMap(false, msgDesc, msgDesc
-						+ errMsg, null, null);
-				throw hdpException;
-			}
-
-			if (topologyName != null) {
-				String finalTopologyNameMatching = topologyName.trim();
-				resultList = stormClient
-						.getTopologyList(finalTopologyNameMatching);
-				if (resultList != null) {
-					LOG.debug("Returning list of " + resultList.size() + " topologies");
-				}
-			}
-		} catch (HadoopException he) {
-			throw he;
-		} catch (Exception e) {
-			String msgDesc = "getStormResources: Unable to get Storm resources.";
-			LOG.error(msgDesc, e);
-			HadoopException hdpException = new HadoopException(msgDesc);
-
-			hdpException.generateResponseDataMap(false,
-					BaseClient.getMessage(e), msgDesc + errMsg, null, null);
-			throw hdpException;
-		}
-		return resultList;
-	}
-	
-}
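
For reference, a caller-side sketch of StormClient.testConnection(). The
nimbus.url/username/password keys match the ones read by getStormClient()
above; the datasource name, host, and credentials are placeholders, and the
class is assumed to sit in (or import) the same package as StormClient:

    import java.util.HashMap;

    public class StormConnectionCheck {
        public static void main(String[] args) {
            HashMap<String, String> props = new HashMap<String, String>();
            // Keys match those read in getStormClient(); values are placeholders.
            props.put("nimbus.url", "http://storm-ui.example.com:8080");
            props.put("username", "storm-admin");
            props.put("password", "storm-admin-password");

            HashMap<String, Object> response =
                    StormClient.testConnection("stormDev", props);
            System.out.println(response);
        }
    }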