Posted to commits@ranger.apache.org by di...@apache.org on 2015/03/16 23:14:34 UTC

incubator-ranger git commit: RANGER-314: Remove custom class loader used by ranger admin for resource lookup

Repository: incubator-ranger
Updated Branches:
  refs/heads/master ccf8d6264 -> a200d82d8


RANGER-314: Remove custom class loader used by ranger admin for resource lookup


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/a200d82d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/a200d82d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/a200d82d

Branch: refs/heads/master
Commit: a200d82d8dd08d7b562019d57ba9aa681abd185c
Parents: ccf8d62
Author: Dilli Dorai Arumugam <da...@hortonworks.com>
Authored: Thu Mar 12 16:27:23 2015 -0700
Committer: Dilli Dorai Arumugam <da...@hortonworks.com>
Committed: Mon Mar 16 15:10:33 2015 -0700

----------------------------------------------------------------------
 .../apache/ranger/plugin/client/BaseClient.java |   8 +-
 .../ranger/plugin/client/HadoopClassLoader.java | 104 ------------------
 .../plugin/client/HadoopConfigHolder.java       |  35 +++----
 .../service-defs/ranger-servicedef-hdfs.json    |   2 +-
 .../services/hbase/client/HBaseClient.java      |  70 ++++++-------
 .../hbase/client/HBaseConnectionMgr.java        |   2 +-
 .../ranger/services/hdfs/client/HdfsClient.java |  55 +++++-----
 .../services/hdfs/client/HdfsConnectionMgr.java |   2 +-
 .../services/hdfs/client/HdfsClientTest.java    |  25 ++++-
 .../ranger/services/hive/client/HiveClient.java |   2 +-
 .../hadoop/client/config/HadoopClassLoader.java | 105 -------------------
 .../client/config/HadoopConfigHolder.java       |  19 +---
 .../main/resources/resourcenamemap.properties   |  16 ---
 13 files changed, 103 insertions(+), 342 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
index cdd813e..4ef3b48 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
@@ -43,12 +43,6 @@ public abstract class BaseClient {
 	
 	protected Map<String,String> connectionProperties ;
 	
-	public BaseClient(String serviceName) {
-    this.serviceName = serviceName ;
-    init() ;
-    login() ;
-	}
-
   public BaseClient(String svcName, Map<String,String> connectionProperties) {
     this(svcName, connectionProperties, null);
   }
@@ -78,7 +72,7 @@ public abstract class BaseClient {
 				+ "policies, but you would not be able to use autocomplete for "
 				+ "resource names. Check xa_portal.log for more info.";
 		try {
-			Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
+			//Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
 			String userName = configHolder.getUserName() ;
 			if (userName == null) {
 				String msgDesc = "Unable to find login username for hadoop environment, ["

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
deleted file mode 100644
index b90f4f6..0000000
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.plugin.client;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URL;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class HadoopClassLoader extends ClassLoader {
-	
-	private static final Log LOG = LogFactory.getLog(HadoopClassLoader.class) ;
-	
-	private HadoopConfigHolder confHolder ;
-	
-	public HadoopClassLoader(HadoopConfigHolder confHolder) {
-		super(Thread.currentThread().getContextClassLoader()) ;
-		this.confHolder = confHolder;
-	}
-	
-	
-	@Override
-	protected URL findResource(String resourceName) {
-		LOG.debug("findResource(" + resourceName + ") is called.") ;
-		URL ret = null;
-	
-		if (confHolder.hasResourceExists(resourceName)) {
-			ret = buildResourceFile(resourceName) ;
-		}
-		else {
-			ret = super.findResource(resourceName);
-		}
-		LOG.debug("findResource(" + resourceName + ") is returning [" + ret + "]") ;
-		return ret ;
-	}
-	
-	
-	@SuppressWarnings("deprecation")
-	private URL buildResourceFile(String aResourceName) {
-		URL ret = null ;
-		String prefix = aResourceName ;
-		String suffix = ".txt" ;
-
-		Properties prop = confHolder.getProperties(aResourceName) ;
-		LOG.debug("Building XML for: " + prop.toString());
-		if (prop != null && prop.size() > 0) {
-			if (aResourceName.contains(".")) {
-				int lastDotFound = aResourceName.indexOf(".") ;
-				prefix = aResourceName.substring(0,lastDotFound) + "-" ;
-				suffix = aResourceName.substring(lastDotFound) ;
-			}
-			
-			try {
-				File tempFile = File.createTempFile(prefix, suffix) ;
-				tempFile.deleteOnExit();
-				PrintWriter out = new PrintWriter(new FileWriter(tempFile)) ;
-				out.println("<?xml version=\"1.0\"?>") ;
-				out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>") ;
-				out.println("<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">") ;
-				for(Object keyobj : prop.keySet()) {
-					String key = (String)keyobj;
-					String val = prop.getProperty(key) ;
-					if (HadoopConfigHolder.HADOOP_RPC_PROTECTION.equals(key) && (val == null || val.trim().isEmpty()))  {
-						continue;
-					}
-					out.println("<property><name>" + key.trim() + "</name><value>" + val + "</value></property>") ;
-				}
-				out.println("</configuration>") ;
-				out.close() ;
-				ret = tempFile.toURL() ;
-			} catch (IOException e) {
-				throw new HadoopException("Unable to load create hadoop configuration file [" + aResourceName + "]", e) ;
-			}
-			
-		}
-		
-		return ret ;
-
-	}
-	
-
-}
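
The deleted loader intercepted findResource() and, for any resource registered against the datasource, wrote the stored properties out as a temporary Hadoop-style XML file, so that Configuration would "find" a synthetic core-site.xml, hdfs-site.xml, or hbase-site.xml whenever this loader was the thread's context class loader. Setting the same properties directly on the client's Configuration makes the temp files, the class-loader swapping, and the related SecurityException handling unnecessary, which is what the rest of this commit removes.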

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
index c0fdbd8..a341a44 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
@@ -21,9 +21,7 @@
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
+import java.util.*;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -57,8 +55,9 @@ public class HadoopConfigHolder  {
 	private String password ;
 	private boolean isKerberosAuth ;
 	
-	private HadoopClassLoader classLoader ;
-	private Map<String,String>  connectionProperties; 
+	private Map<String,String>  connectionProperties;
+
+  private static Set<String> rangerInternalPropertyKeys = new HashSet<String>();
 	
 	public static HadoopConfigHolder getInstance(String aDatasourceName) {
 		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
@@ -108,7 +107,6 @@ public class HadoopConfigHolder  {
 			init() ;
 		}
 		initLoginInfo();
-		initClassLoader() ;
 	}
 
   private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties) {
@@ -122,7 +120,6 @@ public class HadoopConfigHolder  {
     this.defaultConfigFile = defaultConfigFile;
 		initConnectionProp() ;
 		initLoginInfo();
-		initClassLoader() ;
 	}
 	
 	private void initConnectionProp() {
@@ -160,6 +157,13 @@ public class HadoopConfigHolder  {
 			if (in != null) {
 				try {
 					resourcemapProperties.load(in);
+          for (Map.Entry<Object, Object> entry : resourcemapProperties.entrySet() ) {
+            String key = (String)entry.getKey();
+            String value = (String)entry.getValue();
+            if (RANGER_SECTION_NAME.equals(value))  {
+              rangerInternalPropertyKeys.add(key);
+            }
+          }
 				} catch (IOException e) {
 					throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
 				}
@@ -261,11 +265,7 @@ public class HadoopConfigHolder  {
 					
 		}
 	}
-	
-	private void initClassLoader() {
-		classLoader = new HadoopClassLoader(this) ;
-	}
-	
+
 	
 	public Properties getRangerSection() {
 		Properties prop = this.getProperties(RANGER_SECTION_NAME) ;
@@ -317,7 +317,7 @@ public class HadoopConfigHolder  {
 		return datasourceName ;
 	}
 	
-	public boolean hasResourceExists(String aResourceName) {
+	public boolean hasResourceExists(String aResourceName) {    // dilli
 		HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
 		return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)) ;
  	}
@@ -359,15 +359,14 @@ public class HadoopConfigHolder  {
 		return password;
 	}
 
-	public HadoopClassLoader getClassLoader() {
-		return classLoader;
-	}
-
 	public boolean isKerberosAuthentication() {
 		return isKerberosAuth;
 	}
 
-  
+  public Set<String> getRangerInternalPropertyKeys() {
+    return rangerInternalPropertyKeys;
+
+  }
 	
 
 }
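
The new rangerInternalPropertyKeys set is filled while resourcemapProperties loads: every key whose mapped section is RANGER_SECTION_NAME (the xalogin.xml section, per the trimmed resourcenamemap.properties at the end of this diff) is recorded, and getRangerInternalPropertyKeys() exposes it so clients can keep login-only keys out of their Hadoop Configuration. A hypothetical standalone illustration of that bookkeeping:

    Properties resourcemap = new Properties();
    resourcemap.load(in);   // in = stream over resourcenamemap.properties
    Set<String> internal = new HashSet<String>();
    for (Map.Entry<Object,Object> entry : resourcemap.entrySet()) {
        if ("xalogin.xml".equals(entry.getValue())) {   // RANGER_SECTION_NAME
            internal.add((String) entry.getKey());      // username, keytabfile, password
        }
    }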

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/agents-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
----------------------------------------------------------------------
diff --git a/agents-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json b/agents-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
index bb9d428..925cc77 100755
--- a/agents-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
+++ b/agents-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
@@ -75,7 +75,7 @@
 			"name": "fs.default.name",
 			"type": "string",
 			"subType": "",
-			"mandatory": false,
+			"mandatory": true,
 			"validationRegEx":"",
 			"validationMessage": "",
 			"uiHint":"",

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
index 320e084..e051bb9 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
@@ -21,11 +21,7 @@
 
 import java.io.IOException;
 import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.Map.Entry;
 
 import javax.security.auth.Subject;
@@ -50,12 +46,27 @@ public class HBaseClient extends BaseClient {
 
 	private static Subject subj 			 = null;
 
-	public HBaseClient(String serivceName) {
-		super(serivceName) ;		
-	}
+  private Configuration conf;
+
+  private static List<String> rangerInternalPropertyKeys = Arrays.asList("username",
+    "password", "keytabfile");
 
 	public HBaseClient(String serivceName,Map<String,String> connectionProp) {
-		super(serivceName, addDefaultHBaseProp(connectionProp)) ;		
+
+		super(serivceName, addDefaultHBaseProp(connectionProp)) ;
+    conf = HBaseConfiguration.create() ;
+
+    Set<String> rangerInternalPropertyKeys = getConfigHolder().getRangerInternalPropertyKeys();
+    for (Map.Entry<String, String> entry: connectionProperties.entrySet())  {
+      String key = entry.getKey();
+      String value = entry.getValue();
+      if (rangerInternalPropertyKeys.contains(key)) {
+        // skip
+      }  else {
+        conf.set(key, value);
+      }
+    }
+
 	}
 	
 	//TODO: temporary solution - to be added to the UI for HBase 
@@ -113,17 +124,15 @@ public class HBaseClient extends BaseClient {
 				+ "policies, but you would not be able to use autocomplete for "
 				+ "resource names. Check xa_portal.log for more info.";
 		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
 			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-		
+
 				hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() {
 					@Override
 					public Boolean run() {
 						Boolean hbaseStatus1 = false;
 						try {
 						    LOG.info("getHBaseStatus: creating default Hbase configuration");
-							Configuration conf = HBaseConfiguration.create() ;					
+
 							LOG.info("getHBaseStatus: setting config values from client");
 							setClientConfigValues(conf);						
 						    LOG.info("getHBaseStatus: checking HbaseAvailability with the new config");
@@ -184,15 +193,12 @@ public class HBaseClient extends BaseClient {
 					}
 				}) ;
 			} catch (SecurityException se) {
-				String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, "
-						+ "current thread might not be able set the context ClassLoader.";
+				String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance ";
 				HadoopException hdpException = new HadoopException(msgDesc, se);
 				hdpException.generateResponseDataMap(false, getMessage(se),
 						msgDesc + errMsg, null, null);
 				LOG.error(msgDesc + se) ;
 				throw hdpException;
-			} finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
 			}
 		} else {
 			LOG.error("getHBaseStatus: secure login not done, subject is null");
@@ -222,10 +228,6 @@ public class HBaseClient extends BaseClient {
 		subj = getLoginSubject();
 		
 		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-	
 				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
 		
 					@Override
@@ -235,7 +237,7 @@ public class HBaseClient extends BaseClient {
 						HBaseAdmin admin = null ;
 						try {
 							LOG.info("getTableList: creating default Hbase configuration");
-							Configuration conf = HBaseConfiguration.create() ;					
+							Configuration conf = HBaseConfiguration.create() ;	// dilli
 							LOG.info("getTableList: setting config values from client");
 							setClientConfigValues(conf);						
 						    LOG.info("getTableList: checking HbaseAvailability with the new config");
@@ -303,10 +305,6 @@ public class HBaseClient extends BaseClient {
 					}
 					
 				}) ;
-			}
-			finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
-			}
 		}
 		return ret ;
 	}
@@ -320,10 +318,8 @@ public class HBaseClient extends BaseClient {
 		
 		subj = getLoginSubject();
 		if (subj != null) {
-			ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
 			try {
-				Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-				
+
 				ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
 					String tblName = null;
 					@Override
@@ -332,7 +328,7 @@ public class HBaseClient extends BaseClient {
 						HBaseAdmin admin = null ;
 						try {
 							LOG.info("getColumnFamilyList: creating default Hbase configuration");
-							Configuration conf = HBaseConfiguration.create() ;					
+							Configuration conf = HBaseConfiguration.create() ;		// dilli
 							LOG.info("getColumnFamilyList: setting config values from client");
 							setClientConfigValues(conf);						
 						    LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config");
@@ -382,8 +378,7 @@ public class HBaseClient extends BaseClient {
 						}  catch(IOException io) {
 							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
 									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName
-									+ ", table-match:" + columnFamilyMatching + "], "
-									+ "current thread might not be able set the context ClassLoader.";
+									+ ", table-match:" + columnFamilyMatching + "] ";
 							HadoopException hdpException = new HadoopException(msgDesc, io);
 							hdpException.generateResponseDataMap(false, getMessage(io),
 									msgDesc + errMsg, null, null);
@@ -392,8 +387,7 @@ public class HBaseClient extends BaseClient {
 						} catch (SecurityException se) {
 								String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
 										+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName
-										+ ", table-match:" + columnFamilyMatching + "], "
-										+ "current thread might not be able set the context ClassLoader.";
+										+ ", table-match:" + columnFamilyMatching + "] ";
 								HadoopException hdpException = new HadoopException(msgDesc, se);
 								hdpException.generateResponseDataMap(false, getMessage(se),
 										msgDesc + errMsg, null, null);
@@ -403,8 +397,7 @@ public class HBaseClient extends BaseClient {
 						}  catch (Throwable e) {
 							String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
 									+ "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName
-									+ ", table-match:" + columnFamilyMatching + "], "
-									+ "current thread might not be able set the context ClassLoader.";
+									+ ", table-match:" + columnFamilyMatching + "] ";
 							LOG.error(msgDesc);
 							HadoopException hdpException = new HadoopException(msgDesc, e);
 							hdpException.generateResponseDataMap(false, getMessage(e),
@@ -426,15 +419,12 @@ public class HBaseClient extends BaseClient {
 					
 				}) ;
 			} catch (SecurityException se) {
-				String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, "
-						+ "current thread might not be able set the context ClassLoader.";
+				String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance ";
 				HadoopException hdpException = new HadoopException(msgDesc, se);
 				hdpException.generateResponseDataMap(false, getMessage(se),
 						msgDesc + errMsg, null, null);
 				LOG.error(msgDesc + se) ;
 				throw hdpException;
-			} finally {
-				Thread.currentThread().setContextClassLoader(prevCl);
 			}
 		}
 		return ret ;
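
Since conf is now built once in the constructor from the filtered connection properties, the privileged actions only copy those values onto a fresh HBaseConfiguration and probe the cluster; the save/restore bracket around the context class loader disappears. A minimal sketch of the resulting shape of getHBaseStatus (hedged: setClientConfigValues is the helper from this diff, checkHBaseAvailable the HBase 0.98-era admin API):

    Boolean status = Subject.doAs(subj, new PrivilegedAction<Boolean>() {
        @Override
        public Boolean run() {
            try {
                Configuration hbaseConf = HBaseConfiguration.create();
                setClientConfigValues(hbaseConf);            // apply stored client settings
                HBaseAdmin.checkHBaseAvailable(hbaseConf);   // throws if HBase is unreachable
                return Boolean.TRUE;
            } catch (Throwable t) {
                return Boolean.FALSE;   // the real code wraps this into a HadoopException
            }
        }
    });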

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java
index ef0b9ae..5c1c73b 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java
@@ -58,7 +58,7 @@ public class HBaseConnectionMgr {
 								HBaseClient hBaseClient=null;
 								if(serviceName!=null){
 									try{
-										hBaseClient=new HBaseClient(serviceName);
+										hBaseClient=new HBaseClient(serviceName, configs);
 									}catch(Exception ex){
 										LOG.error("Error connecting HBase repository : ", ex);
 									}
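
With the single-argument client constructors removed, both connection managers must hand over the service's configs map; HdfsConnectionMgr below gets the identical one-line change for HdfsClient.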

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
index 1d1eef9..779133f 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
@@ -23,10 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 import javax.security.auth.Subject;
 
@@ -44,30 +41,39 @@ import org.apache.ranger.plugin.client.HadoopException;
 public class HdfsClient extends BaseClient {
 
 	private static final Log LOG = LogFactory.getLog(HdfsClient.class) ;
+  private Configuration conf;
+  private static List<String> rangerInternalPropertyKeys = Arrays.asList("username",
+    "password", "keytabfile");
 
-  public HdfsClient(String serviceName) {
-		super(serviceName) ;
-	}
-	
 	public HdfsClient(String serviceName, Map<String,String> connectionProperties) {
 		super(serviceName,connectionProperties, "hdfs-client") ;
+    conf = new Configuration() ;
+    Set<String> rangerInternalPropertyKeys = getConfigHolder().getRangerInternalPropertyKeys();
+    for (Map.Entry<String, String> entry: connectionProperties.entrySet())  {
+      String key = entry.getKey();
+      String value = entry.getValue();
+      if (rangerInternalPropertyKeys.contains(key)) {
+         // skip
+      }  else {
+        conf.set(key, value);
+      }
+    }
+
 	}
 	
 	private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList) {
 		List<String> fileList = new ArrayList<String>() ;
-		ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
 		String errMsg = " You can still save the repository and start creating "
 				+ "policies, but you would not be able to use autocomplete for "
 				+ "resource names. Check xa_portal.log for more info.";
 		try {
-			Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
 			String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/")) ;
 			String filterRegEx = null;
 			if (fileMatching != null && fileMatching.trim().length() > 0) {
 				filterRegEx = fileMatching.trim() ;
 			}
 			
-			Configuration conf = new Configuration() ;
+
 			UserGroupInformation.setConfiguration(conf);
 			
 			FileSystem fs = null ;
@@ -147,9 +153,6 @@ public class HdfsClient extends BaseClient {
 			}
 			throw hdpException;
 		}
-		finally {
-			Thread.currentThread().setContextClassLoader(prevCl);
-		}
 		return fileList ;
 	}
 
@@ -177,7 +180,7 @@ public class HdfsClient extends BaseClient {
 		String baseDir = args[1] ;
 		String fileNameToMatch = (args.length == 2 ? null : args[2]) ;
 		
-		HdfsClient fs = new HdfsClient(repositoryName) ;
+		HdfsClient fs = new HdfsClient(repositoryName, null) ;
 		List<String> fsList = fs.listFiles(baseDir, fileNameToMatch,null) ;
 		if (fsList != null && fsList.size() > 0) {
 			for(String s : fsList) {
@@ -252,25 +255,25 @@ public class HdfsClient extends BaseClient {
 
     String fsDefaultName = configs.get("fs.default.name") ;
     fsDefaultName = (fsDefaultName == null) ? "" : fsDefaultName.trim();
-    String dfsNameservices = configs.get("dfs.nameservices");
-    dfsNameservices = (dfsNameservices == null) ? "" : dfsNameservices.trim();
-
-    if (fsDefaultName.isEmpty() && dfsNameservices.isEmpty())  {
-      throw new IllegalArgumentException("Value for neither fs.default.name nor dfs.nameservices is specified");
-    }
-
-    if (!fsDefaultName.isEmpty() && !dfsNameservices.isEmpty())  {
-      throw new IllegalArgumentException("Value for both fs.default.name and dfs.nameservices are specified. They are mutually exclusive");
+    if (fsDefaultName.isEmpty())  {
+      throw new IllegalArgumentException("Value for fs.default.name is not specified");
     }
 
+    String dfsNameservices = configs.get("dfs.nameservices");
+    dfsNameservices = (dfsNameservices == null) ? "" : dfsNameservices.trim();
     if (!dfsNameservices.isEmpty()) {
+      String proxyProvider = configs.get("dfs.client.failover.proxy.provider." + dfsNameservices);
+      proxyProvider =   (proxyProvider == null) ? "" : proxyProvider.trim();
+      if (proxyProvider.isEmpty())  {
+        throw new IllegalArgumentException("Value for " + "dfs.client.failover.proxy.provider." + dfsNameservices + " not specified");
+      }
+
       String dfsNameNodes = configs.get("dfs.ha.namenodes." + dfsNameservices);
       dfsNameNodes = (dfsNameNodes == null) ? "" : dfsNameNodes.trim();
       if (dfsNameNodes.isEmpty())  {
-        throw new IllegalArgumentException("Value for " + "dfs.ha.namenodes." + dfsNameservices + " not specified");
+        throw new IllegalArgumentException("Value for " + "dfs.ha.namenodes." + proxyProvider + " not specified");
       }
       String[] dfsNameNodeElements = dfsNameNodes.split(",");
-      System.out.println("elements: " + dfsNameNodeElements);
       for (String dfsNameNodeElement : dfsNameNodeElements)  {
         String nameNodeUrlKey = "dfs.namenode.rpc-address." +
             dfsNameservices + "." + dfsNameNodeElement.trim();

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
index c329a94..d62bb9c 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
@@ -54,7 +54,7 @@ public class HdfsConnectionMgr {
 						final Callable<HdfsClient> connectHDFS = new Callable<HdfsClient>() {
 							@Override
 							public HdfsClient call() throws Exception {
-								return new HdfsClient(serviceName);
+								return new HdfsClient(serviceName, configs);
 							}
 						};
 						

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
index b5ccbf0..5e4fa57 100644
--- a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/client/HdfsClientTest.java
@@ -48,7 +48,7 @@ public class HdfsClientTest {
   }
 
   @Test(expected = IllegalArgumentException.class)
-  public void testNeietherNnNorHaSpecified()  throws IllegalArgumentException {
+  public void testFsDefaultNameNotSpecified()  throws IllegalArgumentException {
     Map configs = new HashMap<String, String>();
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
@@ -57,15 +57,15 @@ public class HdfsClientTest {
   }
 
   @Test(expected = IllegalArgumentException.class)
-	public void testBothNnAndHaSpecified()  throws IllegalArgumentException {
+  public void testProxyProviderNotSpecified()  throws IllegalArgumentException {
     Map configs = new HashMap<String, String>();
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
-    configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
     HdfsClient.validateConnectionConfigs(configs);
-	}
+  }
 
   @Test(expected = IllegalArgumentException.class)
 	public void testNnElementsNotSpecified()  throws IllegalArgumentException {
@@ -73,7 +73,10 @@ public class HdfsClientTest {
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.client.failover.proxy.provider.hwqe-1425428405",
+      "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
     HdfsClient.validateConnectionConfigs(configs);
 	}
 
@@ -83,7 +86,10 @@ public class HdfsClientTest {
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.client.failover.proxy.provider.hwqe-1425428405",
+      "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
     configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
     HdfsClient.validateConnectionConfigs(configs);
 	}
@@ -94,7 +100,10 @@ public class HdfsClientTest {
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.client.failover.proxy.provider.hwqe-1425428405",
+      "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
     configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
     configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn2", "node-2.example.com:8020");
     HdfsClient.validateConnectionConfigs(configs);
@@ -106,7 +115,10 @@ public class HdfsClientTest {
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.client.failover.proxy.provider.hwqe-1425428405",
+      "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
     configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
     configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn1", "node-1.example.com:8020");
     HdfsClient.validateConnectionConfigs(configs);
@@ -144,8 +156,11 @@ public class HdfsClientTest {
     configs.put("username", "hdfsuser");
     configs.put("password", "hdfsuser");
     configs.put("hadoop.security.authentication", "simple");
-    // configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    configs.put("fs.default.name", "hdfs://node-2.example.com:8020");
+    configs.put("fs.default.name", "hdfs://hwqe-1425428405");
     configs.put("dfs.nameservices", "hwqe-1425428405");
+    configs.put("dfs.client.failover.proxy.provider.hwqe-1425428405",
+      "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
     configs.put("dfs.ha.namenodes.hwqe-1425428405", "nn1,nn2");
     configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn1", "node-1.example.com:8020");
     configs.put("dfs.namenode.rpc-address.hwqe-1425428405.nn2", "node-2.example.com:8020");

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
index da08240..40efe2e 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
@@ -51,7 +51,7 @@ public class HiveClient extends BaseClient implements Closeable {
 	
 
 	public HiveClient(String serviceName) {
-		super(serviceName) ;
+		super(serviceName, null) ;
 		initHive() ;
 	}
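
HiveClient keeps its one-argument convenience form by delegating to the surviving two-argument BaseClient constructor with a null connection-properties map.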
 	

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
deleted file mode 100644
index bb13538..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopClassLoader.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client.config;
-
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URL;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-public class HadoopClassLoader extends ClassLoader {
-	
-	private static final Log LOG = LogFactory.getLog(HadoopClassLoader.class) ;
-	
-	private HadoopConfigHolder confHolder ;
-	
-	public HadoopClassLoader(HadoopConfigHolder confHolder) {
-		super(Thread.currentThread().getContextClassLoader()) ;
-		this.confHolder = confHolder;
-	}
-	
-	
-	@Override
-	protected URL findResource(String resourceName) {
-		LOG.debug("findResource(" + resourceName + ") is called.") ;
-		URL ret = null;
-	
-		if (confHolder.hasResourceExists(resourceName)) {
-			ret = buildResourceFile(resourceName) ;
-		}
-		else {
-			ret = super.findResource(resourceName);
-		}
-		LOG.debug("findResource(" + resourceName + ") is returning [" + ret + "]") ;
-		return ret ;
-	}
-	
-	
-	@SuppressWarnings("deprecation")
-	private URL buildResourceFile(String aResourceName) {
-		URL ret = null ;
-		String prefix = aResourceName ;
-		String suffix = ".txt" ;
-
-		Properties prop = confHolder.getProperties(aResourceName) ;
-		LOG.debug("Building XML for: " + prop.toString());
-		if (prop != null && prop.size() > 0) {
-			if (aResourceName.contains(".")) {
-				int lastDotFound = aResourceName.indexOf(".") ;
-				prefix = aResourceName.substring(0,lastDotFound) + "-" ;
-				suffix = aResourceName.substring(lastDotFound) ;
-			}
-			
-			try {
-				File tempFile = File.createTempFile(prefix, suffix) ;
-				tempFile.deleteOnExit();
-				PrintWriter out = new PrintWriter(new FileWriter(tempFile)) ;
-				out.println("<?xml version=\"1.0\"?>") ;
-				out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>") ;
-				out.println("<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">") ;
-				for(Object keyobj : prop.keySet()) {
-					String key = (String)keyobj;
-					String val = prop.getProperty(key) ;
-					if (HadoopConfigHolder.HADOOP_RPC_PROTECTION.equals(key) && (val == null || val.trim().isEmpty()))  {
-						continue;
-					}
-					out.println("<property><name>" + key.trim() + "</name><value>" + val + "</value></property>") ;
-				}
-				out.println("</configuration>") ;
-				out.close() ;
-				ret = tempFile.toURL() ;
-			} catch (IOException e) {
-				throw new HadoopException("Unable to load create hadoop configuration file [" + aResourceName + "]", e) ;
-			}
-			
-		}
-		
-		return ret ;
-
-	}
-	
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
index 7651bb5..f9b3eee 100644
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
+++ b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
@@ -56,8 +56,7 @@ public class HadoopConfigHolder  {
 	private String password ;
 	private boolean isKerberosAuth ;
 	
-	private HadoopClassLoader classLoader ;
-	private HashMap<String,String>  connectionProperties; 
+	private HashMap<String,String>  connectionProperties;
 	
 	public static HadoopConfigHolder getInstance(String aDatasourceName) {
 		HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
@@ -102,7 +101,6 @@ public class HadoopConfigHolder  {
 			init() ;
 		}
 		initLoginInfo();
-		initClassLoader() ;
 	}
 	
 	private HadoopConfigHolder(String aDatasourceName, HashMap<String,String> connectionProperties) {
@@ -110,7 +108,6 @@ public class HadoopConfigHolder  {
 		this.connectionProperties = connectionProperties ;
 		initConnectionProp() ;
 		initLoginInfo();
-		initClassLoader() ;
 	}
 	
 	private void initConnectionProp() {
@@ -248,12 +245,7 @@ public class HadoopConfigHolder  {
 					
 		}
 	}
-	
-	private void initClassLoader() {
-		classLoader = new HadoopClassLoader(this) ;
-	}
-	
-	
+
 	public Properties getRangerSection() {
 		Properties prop = this.getProperties(RANGER_SECTION_NAME) ;
 		if (prop == null) {
@@ -346,15 +338,8 @@ public class HadoopConfigHolder  {
 		return password;
 	}
 
-	public HadoopClassLoader getClassLoader() {
-		return classLoader;
-	}
-
 	public boolean isKerberosAuthentication() {
 		return isKerberosAuth;
 	}
 
-  
-	
-
 }
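
The legacy lookup-client copies get the same surgery as the agents-common versions above: HadoopClassLoader is deleted outright, and this HadoopConfigHolder loses its classLoader field, initClassLoader(), and getClassLoader() accessor.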

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/a200d82d/security-admin/src/main/resources/resourcenamemap.properties
----------------------------------------------------------------------
diff --git a/security-admin/src/main/resources/resourcenamemap.properties b/security-admin/src/main/resources/resourcenamemap.properties
index 7a9e89c..201c0fa 100644
--- a/security-admin/src/main/resources/resourcenamemap.properties
+++ b/security-admin/src/main/resources/resourcenamemap.properties
@@ -13,22 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-fs.default.name=core-site.xml
-hadoop.security.authentication=core-site.xml
-hadoop.security.authorization=core-site.xml
-hadoop.security.auth_to_local=core-site.xml
-hadoop.rpc.protection=core-site.xml
-dfs.nameservices=hdfs-site.xml
-dfs.datanode.kerberos.principal=hdfs-site.xml
-dfs.namenode.kerberos.principal=hdfs-site.xml
-dfs.secondary.namenode.kerberos.principal=hdfs-site.xml
 username=xalogin.xml
 keytabfile=xalogin.xml
 password=xalogin.xml
-hbase.master.kerberos.principal=hbase-site.xml
-hbase.rpc.engine=hbase-site.xml
-hbase.rpc.protection=hbase-site.xml
-hbase.security.authentication=hbase-site.xml
-hbase.zookeeper.property.clientPort=hbase-site.xml
-hbase.zookeeper.quorum=hbase-site.xml
-zookeeper.znode.parent=hbase-site.xml
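
After the trim, resourcenamemap.properties maps only the Ranger-internal login keys (username, keytabfile, password) to xalogin.xml; every entry that used to be routed into a synthetic core-site.xml, hdfs-site.xml, or hbase-site.xml now travels directly into the client's Configuration, and the three surviving keys are exactly the set reported by getRangerInternalPropertyKeys().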