You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ranger.apache.org by ma...@apache.org on 2015/01/27 02:58:08 UTC

[1/6] incubator-ranger git commit: RANGER-203: HDFS Plugin - remove unused sources from previous version

Repository: incubator-ranger
Updated Branches:
  refs/heads/stack 8d0378c56 -> d7bf8e09d


RANGER-203: HDFS Plugin - remove unused sources from previous version

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/ce1808af
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/ce1808af
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/ce1808af

Branch: refs/heads/stack
Commit: ce1808afe7e904b0d3f3da9fea762e5102f178e7
Parents: 8d0378c
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Mon Jan 26 16:03:02 2015 -0800
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Mon Jan 26 16:03:02 2015 -0800

----------------------------------------------------------------------
 .../ranger/pdp/hdfs/AdminPolicyChecker.java     | 183 -------
 .../ranger/pdp/hdfs/RangerAuthorizer.java       |  40 --
 .../apache/ranger/pdp/hdfs/URLBasedAuthDB.java  | 479 -------------------
 .../ranger/pdp/hdfs/PolicyCacheStoreTest.java   | 170 -------
 .../ranger/pdp/hdfs/URLBasedAuthDBTest.java     |  66 ---
 ...asedAuthDB_IsAuditLogEnabledByACL_PTest.java | 340 -------------
 6 files changed, 1278 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/AdminPolicyChecker.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/AdminPolicyChecker.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/AdminPolicyChecker.java
deleted file mode 100644
index 919a7a1..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/AdminPolicyChecker.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.pdp.hdfs;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-import org.apache.commons.io.FilenameUtils;
-
-public class AdminPolicyChecker {
-		
-	private  static final String PATH_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrst0123456789-_." ;
-	private  static char[] PATH_CHAR_SET = PATH_CHARS.toCharArray() ; 
-	private static int PATH_CHAR_SET_LEN = PATH_CHAR_SET.length ; 
-
-	
-	public static List<String> adminUserList = new ArrayList<String>() ;  // "cli@adfpros.com"
-	public static List<String> adminGroupList = new ArrayList<String>()  ;
-	
-	static {
-		adminUserList.add("cli@adfpros.com") ;
-		adminGroupList.add("policymgradmin") ;
-	}
-	
-	
-	public void checkAdminAccessForResource(String selectedResourcePath, boolean isRecursiveFlag, String username) {
-		
-		if (adminUserList.contains(username)) {
-			return ;  
-		}
-		
-		List<String> groups = getUserGroupsForUser(username) ;
-		
-		if (adminGroupList.contains(groups)) {
-			
-		}
-		
-		checkAdminAccessForResource(new Path(selectedResourcePath, isRecursiveFlag), username) ;
-	}
-
-	private void checkAdminAccessForResource(Path resourcePath, String username) {
-
-		List<Path> adminPathList = getAdminPathFromDB(username)  ;
-
-		if (!adminPathList.isEmpty()) {
-			for(Path adminPath : adminPathList ) {
-				if (adminPath.isMatched(resourcePath)) {
-					return  ;
-				}
-			}
-		}
-
-		throw new SecurityException("User [" + username + "]  does not have admin privileges on path [" + resourcePath + "]") ;
-
-	}
-	
-	class Path {
-		String fullPath ;
-		boolean recursiveFlag ;
-
-		Path(String fullPath, boolean recursiveFlag) {
-			this.fullPath = fullPath;
-			this.recursiveFlag = recursiveFlag;
-		}
-
-		public boolean isMatched(Path resourcePath) {
-			// Since it is a Regular Expression Compared with Regular Expression
-			// We will expand the resourcepath to a normalized form and see if it matches with the fullpath using a WildCardMatch
-			// THIS IS JUST A WORK-AROUND. Need more permanent solution - 11/19/2013
-			
-			String expandedPath = repaceMetaChars(resourcePath) ;
-			
-			if (recursiveFlag) {
-				return URLBasedAuthDB.isRecursiveWildCardMatch(expandedPath, fullPath) ;
-			}
-			else {
-				return FilenameUtils.wildcardMatch(expandedPath, fullPath) ;
-			}
-		}
-		
-		private String repaceMetaChars(Path regEx) {
-			
-			String expandedPath = regEx.fullPath ;
-			
-			if (expandedPath.contains("*")) {
-				String replacement = getRandomString(5,60) ;
-				expandedPath.replaceAll("\\*", replacement) ;
-			}
-			
-			if (expandedPath.contains("?")) {
-				String replacement = getRandomString(1,1) ;
-				expandedPath.replaceAll("\\?", replacement) ;
-			}
-			
-			if (regEx.recursiveFlag) {
-				int level = getRandomInt(3,10) ;
-				if (! expandedPath.endsWith("/")) {
-					expandedPath = expandedPath + "/" ;
-				}
-				expandedPath = expandedPath + getRandomString(5,60) ;
-				
-				for(int i = 1 ; i  < level ; i++) {
-					expandedPath = expandedPath + "/" + getRandomString(5,60) ;
-				}
-			}
-			return expandedPath ;
-		}
-		
-		
-		private Random random = new Random() ;
-
-		private String getRandomString(int minLen, int maxLen) {
-			StringBuilder sb = new StringBuilder() ;
-			int len = getRandomInt(minLen,maxLen) ;
-			for(int i = 0 ; i < len ; i++) {
-				int charIdx = random.nextInt(PATH_CHAR_SET_LEN) ;
-				sb.append( PATH_CHAR_SET[charIdx] ) ;
-			}
-			return null;
-		}
-		
-		private int getRandomInt(int min, int max) {
-			if (min == max) {
-				return min ;
-			}
-			else {
-				int interval = max - min ;
-				return ((random.nextInt() % interval) + min) ;
-			}
-		}
-
-	}
-	
-	
-	private List<Path> getAdminPathFromDB(String username) {
-		
-		List<Path> ret = new ArrayList<Path>() ;
-
-		//
-		// TODO:  database work to get ACL ....
-		//
-		
-		// Get all policy acl where the user has ADMIN permission +
-		// Get all policy acl where group associated with user has ADMIN permission 
-		// For each of the acl
-		//	  For path in acl.getResourcePath().splitBy(",")
-		//	     ret.add(new Path(path, acl.recursiveFlag)) ;
-		
-		return ret;
-	}
-	
-	
-	private List<String>  getUserGroupsForUser(String username) {
-		List<String> groupList = new ArrayList<String>() ;
-
-		//
-		// TODO:  database work to get List of groups ....
-		//
-
-		return groupList ;
-	}
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/RangerAuthorizer.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/RangerAuthorizer.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/RangerAuthorizer.java
deleted file mode 100644
index da6dd65..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/RangerAuthorizer.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.pdp.hdfs;
-
-import java.util.Set;
-
-import org.apache.ranger.authorization.hadoop.HDFSAccessVerifier;
-
-public class RangerAuthorizer implements HDFSAccessVerifier {
-
-	private static URLBasedAuthDB authDB = URLBasedAuthDB.getInstance() ;
-	
-	@Override
-	public boolean isAccessGranted(String aPathName, String aPathOwnerName, String access, String username, Set<String> groups) {
-		return authDB.isAccessGranted(aPathName, aPathOwnerName, access, username, groups);
-	}
-
-	@Override
-	public boolean isAuditLogEnabled(String aPathName) {
-		return authDB.isAuditLogEnabled(aPathName) ;
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB.java
deleted file mode 100644
index 4136c6d..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB.java
+++ /dev/null
@@ -1,479 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hdfs;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.ranger.authorization.hadoop.HDFSAccessVerifier;
-import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
-import org.apache.ranger.pdp.config.PolicyChangeListener;
-import org.apache.ranger.pdp.config.PolicyRefresher;
-import org.apache.ranger.pdp.constants.RangerConstants;
-import org.apache.ranger.pdp.model.Policy;
-import org.apache.ranger.pdp.model.PolicyContainer;
-import org.apache.ranger.pdp.model.ResourcePath;
-import org.apache.ranger.pdp.model.RolePermission;
-
-public class URLBasedAuthDB implements HDFSAccessVerifier, PolicyChangeListener {
-
-	private static final Log LOG = LogFactory.getLog(URLBasedAuthDB.class) ;
-
-	private static URLBasedAuthDB me = null;
-	
-	private PolicyRefresher refresher = null ;
-	
-	private PolicyContainer policyContainer = null;
-	
-	private HashMap<String,Boolean> cachedAuditFlag = new HashMap<String,Boolean>() ;	// needs to be cleaned when ruleList changes
-	
-	private static final long MAX_NO_OF_AUDIT_CACHE_ENTRIES = 1000L ;
-
-
-	public static URLBasedAuthDB getInstance() {
-		if (me == null) {
-			synchronized (URLBasedAuthDB.class) {
-				URLBasedAuthDB temp = me;
-				if (temp == null) {
-					me = new URLBasedAuthDB();
-					me.init() ;
-				}
-			}
-		}
-		return me;
-	}
-
-	private URLBasedAuthDB() {
-		String url 			 = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HDFS_POLICYMGR_URL_PROP);
-		long  refreshInMilli = RangerConfiguration.getInstance().getLong(
-				RangerConstants.RANGER_HDFS_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_PROP ,
-				RangerConstants.RANGER_HDFS_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_DEFAULT);
-		String sslConfigFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HDFS_POLICYMGR_SSL_CONFIG_FILE_PROP) ;
-		
-		String lastStoredFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HDFS_LAST_SAVED_POLICY_FILE_PROP) ;
-		
-		refresher = new PolicyRefresher(url, refreshInMilli,sslConfigFileName,lastStoredFileName) ;
-	
-		String saveAsFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HDFS_POLICYMGR_URL_SAVE_FILE_PROP) ;
-		if (saveAsFileName != null) {
-			refresher.setSaveAsFileName(saveAsFileName) ;
-		}
-		
-		if (lastStoredFileName != null) {
-			refresher.setLastStoredFileName(lastStoredFileName);
-		}	
-	}
-
-	private void init() {
-		refresher.setPolicyChangeListener(this);
-	}
-	
-	@Override
-	public void OnPolicyChange(PolicyContainer aPolicyContainer) {
-		setPolicyContainer(aPolicyContainer);
-	}
-
-
-	@Override
-	public boolean isAccessGranted(String aPathName, String pathOwnerName, String access, String username, Set<String> groups) {
-
-		PolicyContainer pc = getPolicyContainer() ;
-		
-		if (pc == null) {
-			return false ;
-		}
-		
-		for(Policy acl :  pc.getAcl()) {
-			
-			if (! acl.isEnabled()) {
-				LOG.debug("Diabled acl found [" + acl + "]. Skipping this acl ...") ;
-				continue ;
-			}
-
-			for(ResourcePath resource : acl.getResourceList()) {
-				
-				String path = resource.getPath() ;
-				
-				boolean rulePathMatched = false ;
-				
-				if (acl.getRecursiveInd() == 1) {
-					if (resource.isWildcardPath()) {
-						rulePathMatched = isRecursiveWildCardMatch(aPathName, path) ;
-					}
-					else {
-						rulePathMatched = aPathName.startsWith(path) ;
-					}
-				}
-				else {
-					if (resource.isWildcardPath()) {
-						rulePathMatched = FilenameUtils.wildcardMatch(aPathName, path) ;
-					}
-					else {
-						rulePathMatched = aPathName.equals(path) ;
-					}
-				}
-				
-				
-				if (rulePathMatched) {
-					for (RolePermission rp : acl.getPermissions()) {
-						if (rp.getAccess().contains(access)) {
-							if ( rp.getUsers().contains(username) ) {
-								return true ;
-							}
-							for(String ug : groups) {
-								if ( rp.getGroups().contains(ug)) {
-									return true ;
-								}
-							}
-							if (rp.getGroups().contains(RangerConstants.PUBLIC_ACCESS_ROLE)) {
-								return true ;
-							}
-						}
-					}
-				}
-			
-			}
-		}
-		
-		return false ;
-
-	}
-	
-	public static boolean isRecursiveWildCardMatch(String pathToCheck, String wildcardPath) {
-		if (pathToCheck != null) {
-			StringBuilder sb = new StringBuilder() ;
-			for(String p :  pathToCheck.split(File.separator) ) {
-				sb.append(p) ;
-				boolean matchFound = FilenameUtils.wildcardMatch(sb.toString(), wildcardPath) ;
-				if (matchFound) {
-					return true ;
-				}
-				sb.append(File.separator) ;
-			}
-			sb = null;
-		}
-		return false;
-	}
-
-	public PolicyContainer getPolicyContainer() {
-		return policyContainer;
-	}
-
-	private synchronized void setPolicyContainer(PolicyContainer aPolicyContainer) {
-		
-		for(Policy p : aPolicyContainer.getAcl()) {
-			for(RolePermission rp : p.getPermissions()) {
-				List<String> rpaccess = rp.getAccess() ;
-				if (rpaccess != null && rpaccess.size() > 0) {
-					List<String> temp = new ArrayList<String>() ;
-					for(String s : rpaccess) {
-						temp.add(s.toLowerCase()) ;
-					}
-					rp.setAccess(temp);
-				}
-			}
-		}
-		
-		this.policyContainer = aPolicyContainer ;
-		this.cachedAuditFlag.clear(); 
-	}
-	
-
-
-	public UserPermission printPermissionInfo(UserGroupInformation ugi) {
-		return printPermissionInfo(ugi, null) ;
-	}
-
-	public UserPermission printPermissionInfo(UserGroupInformation ugi, String aPathName) {
-		
-		String username = ugi.getShortUserName() ;
-		
-		String[] groups = ugi.getGroupNames() ;
-		
-		UserPermission up = new UserPermission(username,groups, aPathName) ;
-		
-		PolicyContainer pc = getPolicyContainer() ;
-		
-		if (pc != null) {
-		
-			for(Policy acl :  pc.getAcl()) {
-	
-				for(ResourcePath resource : acl.getResourceList()) {
-					
-					String path = resource.getPath() ;
-					
-					boolean rulePathMatched = false ;
-					
-					if (acl.getRecursiveInd() == 1) {
-						if (resource.isWildcardPath()) {
-							rulePathMatched = isRecursiveWildCardMatch(aPathName, path) ;
-						}
-						else {
-							rulePathMatched = aPathName.startsWith(path) ;
-						}
-					}
-					else {
-						if (resource.isWildcardPath()) {
-							rulePathMatched = FilenameUtils.wildcardMatch(aPathName, path) ;
-						}
-						else {
-							rulePathMatched = aPathName.equals(path) ;
-						}
-					}
-					
-					
-					if (rulePathMatched) {
-						for (RolePermission rp : acl.getPermissions()) {
-							boolean isAccessGranted = false ;
-							if (! isAccessGranted ) {
-								if ( rp.getUsers().contains(username) ) {
-									up.add(resource, acl.getRecursiveInd(), username, null,  rp.getAccess());
-									isAccessGranted = true ;
-								}
-							}
-							if ( ! isAccessGranted ) { 
-								for(String ug : groups) {
-									if ( rp.getGroups().contains(ug)) {
-										up.add(resource, acl.getRecursiveInd(), null, ug,  rp.getAccess());
-									}
-								}
-							}
-							if (! isAccessGranted ) {
-								if (rp.getGroups().contains(RangerConstants.PUBLIC_ACCESS_ROLE)) {
-									up.add(resource, acl.getRecursiveInd(), null, RangerConstants.PUBLIC_ACCESS_ROLE,  rp.getAccess());
-								}
-							}
-						}
-					}
-				}
-			}
-		}
-		
-		return up ;
-	}
-	
-	
-	class UserPermission {
-
-		private String userName ;
-		private String groups ;
-		private String pathName ;
-		private HashMap<String,HashSet<String>> userPermissionMap = new HashMap<String,HashSet<String>>() ;
-
-		public UserPermission(String userName, String[] groupList, String pathName) {
-			this.userName = userName ;
-			this.pathName = pathName ;
-			StringBuilder sb = new StringBuilder() ;
-			boolean first = true ;
-			TreeSet<String> gl = new TreeSet<String>() ;
-			for(String g : groupList) {
-				gl.add(g) ;
-			}
-			for(String group : gl) {
-				if (first) {
-					first = false ;
-				}
-				else {
-					sb.append(",") ;
-				}
-				sb.append(group) ;
-			}
-			this.groups = sb.toString()  ;
-		}
-		
-		
-		public void add(ResourcePath resource, int recursiveInd, String userName, String groupName, List<String> accessList) {
-			
-			String path = resource.getPath() ;
-			
-			if (recursiveInd == 1) {
-				if (path.endsWith("/")) {
-					path = path + "**" ;
-				}
-				else {
-					path = path + "/" + "**" ;
-				}
-			}
-			
-			HashSet<String> permMap = userPermissionMap.get(path) ;
-			
-			if (permMap == null) {
-				permMap = new HashSet<String>() ;
-				userPermissionMap.put(path,permMap) ;
-			}
-			
-			for(String access : accessList) {
-				if (! permMap.contains(access)) {
-					permMap.add(access) ;
-				}
-			}
-			
-		}
-		
-		public void printUserInfo() {
-			System.out.println("# USER INFORMATION") ;
-			System.out.println("USER:   " + userName ) ;
-			System.out.println("GROUPS: " + groups ) ;
-		}
-		
-		public void print() {
-			if (pathName != null) {
-				System.out.println("# PERMISSION INFORMATION FOR PATH [" + pathName + "]" + (userPermissionMap.size() == 0 ? " - NO RULES FOUND" : "")) ;
-			}
-			else {
-				System.out.println("# PERMISSION INFORMATION" + (userPermissionMap.size() == 0 ? " - NO RULES FOUND" : "")) ;
-			}
-			
-
-			if (userPermissionMap.size() > 0) {
-				TreeSet<String> pathSet = new TreeSet<String>() ;
-				pathSet.addAll(userPermissionMap.keySet()) ;
-				StringBuilder sb = new StringBuilder();
-				for(String path : pathSet) {
-					sb.setLength(0) ;
-					sb.append(String.format("%-50s", path)).append("|") ;
-					TreeSet<String> permSet = new TreeSet<String>() ;
-					permSet.addAll(userPermissionMap.get(path)) ;
-					boolean first = true ;
-					for(String perm: permSet) {
-						if (! first) {
-							sb.append(",") ;
-						}
-						else {
-							first = false ;
-						}
-						sb.append(perm) ;
-					}
-					System.out.println(sb.toString()) ;
-				}
-			}
-			
-		}
-	}
-	
-	
-	@Override
-	public boolean isAuditLogEnabled(String aPathName) {
-		boolean ret = false ;
-		
-		HashMap<String,Boolean> tempCachedAuditFlag = cachedAuditFlag ;
-		
-		Boolean auditResult = (tempCachedAuditFlag == null ? null : tempCachedAuditFlag.get(aPathName)) ;
-		
-		if (auditResult != null) {
-			ret =  auditResult ;
-		}
-		else {
-			ret = isAuditLogEnabledByACL(aPathName) ;
-			if (tempCachedAuditFlag != null) {
-				// tempCachedAuditFlag.put(aPathName,Boolean.valueOf(ret)) ;
-				synchronized(tempCachedAuditFlag) {
-					if (tempCachedAuditFlag.size() > MAX_NO_OF_AUDIT_CACHE_ENTRIES) {
-						tempCachedAuditFlag.clear(); 
-					}
-					tempCachedAuditFlag.put(aPathName,Boolean.valueOf(ret)) ;
-				}
-			}
-		}
-		
-		return ret ;
-		
-	}
-
-	
-	public boolean isAuditLogEnabledByACL(String aPathName) {
-		
-		boolean ret = false ;
-		
-		PolicyContainer pc = getPolicyContainer() ;
-		
-		if (pc == null) {
-			return false ;
-		}
-		
-		for(Policy acl :  pc.getAcl()) {
-
-			for(ResourcePath resource : acl.getResourceList()) {
-				
-				String path = resource.getPath() ;
-				
-				boolean rulePathMatched = false ;
-				
-				if (acl.getRecursiveInd() == 1) {
-					if (resource.isWildcardPath()) {
-						rulePathMatched = isRecursiveWildCardMatch(aPathName, path) ;
-					}
-					else {
-						rulePathMatched = aPathName.startsWith(path) ;
-					}
-				}
-				else {
-					if (resource.isWildcardPath()) {
-						rulePathMatched = FilenameUtils.wildcardMatch(aPathName, path) ;
-					}
-					else {
-						rulePathMatched = aPathName.equals(path) ;
-					}
-				}
-				
-				
-				if (rulePathMatched) {
-					ret = ( acl.getAuditInd() == 1)  ;
-					break ;
-				}
-			}
-		}
-
-		return ret ;
-	}
-	
-	public static void main(String[] args) throws Throwable {
-		LogManager.getLogger(URLBasedAuthDB.class).setLevel(Level.ERROR);
-		URLBasedAuthDB authDB = URLBasedAuthDB.getInstance() ;
-		UserPermission up = null; 
-		if (args.length == 0) {
-			up = authDB.printPermissionInfo(UserGroupInformation.getCurrentUser());
-			up.printUserInfo() ;
-			up.print();
-		}
-		else {
-			up = authDB.printPermissionInfo(UserGroupInformation.getCurrentUser());
-			up.printUserInfo() ;
-			for(String path : args) {
-				up = authDB.printPermissionInfo(UserGroupInformation.getCurrentUser(), path);
-				up.print();
-				System.out.println();
-			}
-		}
-		System.exit(0);
-	}
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/PolicyCacheStoreTest.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/PolicyCacheStoreTest.java b/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/PolicyCacheStoreTest.java
deleted file mode 100644
index ad1f472..0000000
--- a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/PolicyCacheStoreTest.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hdfs;
-
-import java.io.BufferedReader;
-import java.io.FileNotFoundException;
-import java.io.FileReader;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.pdp.config.ConfigWatcher;
-import org.apache.ranger.pdp.config.PolicyRefresher;
-import org.apache.ranger.pdp.hdfs.URLBasedAuthDB;
-import org.apache.ranger.pdp.model.Policy;
-import org.apache.ranger.pdp.model.PolicyContainer;
-import org.apache.ranger.pdp.model.RolePermission;
-import org.junit.Before;
-import org.junit.After;
-import org.junit.Test;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.when;
-
-
-public class PolicyCacheStoreTest {
-	URLBasedAuthDB authDB = null;
-	ConfigWatcher watcherDaemon = null;
-	PolicyRefresher pr = null;
-	PolicyContainer policyContainer=null;
-	String url=null;
-	String sslConfigFileName=null;
-	String lastStoredFileName=null;
-	Long refreshInterval =0L;
-	private static final Log LOG = LogFactory.getLog(PolicyCacheStoreTest.class);
-	@Before
-	public void setup(){
-		authDB = URLBasedAuthDB.getInstance();
-		
-	}
-	
-	@After
-	public void teardown(){
-		authDB = null;
-		PolicyRefresher pr = null;
-	}
-
-	@Test	
-	public void testHdfsPolicyCacheStore(){
-		//Check if the policy cache gets created when agent get created;
-		url="dummyurl";
-		refreshInterval=10L;
-		sslConfigFileName = "dummyConfigFileName.xml";
-		lastStoredFileName = System.getProperty("user.home") +"/"+ "haooopPolicyCache.json";
-		policyContainer = buildPolicyContainer(
-				"/demo/data", 
-				1,
-				asList("allow"), 
-				asList("guest"), 
-				asList("sales"),
-				null, // ipAddress
-				true, // policyEnabled
-				true); // auditEnabled
-	    authDB.OnPolicyChange(policyContainer);
-		pr = spy(new PolicyRefresher(url,refreshInterval,sslConfigFileName,lastStoredFileName));
-		pr.setPolicyContainer(policyContainer);
-		pr.setPolicyChangeListener(authDB);
-		PolicyContainer newPr = readPolicyCache(lastStoredFileName);
-		assertEquals(policyToString(policyContainer),policyToString(newPr));
-	}
-
-	private static PolicyContainer buildPolicyContainer(String resource,
-		int recursiveInd, List<String> accessTypes, List<String> users,
-		List<String> groups, List<String> ipAddresses,
-		boolean policyEnabled, boolean auditEnabled) {
-
-		PolicyContainer policyContainer = new PolicyContainer();
-		policyContainer.setRepositoryName("hadoopdev");
-
-		List<Policy> policies = new ArrayList<Policy>();
-
-		Policy policy = new Policy();
-		policy.setResource(resource);
-		policy.setRecursiveInd(recursiveInd);
-		policy.setPolicyStatus(policyEnabled ? "Enabled" : "NotEnabled");
-		policy.setAuditInd(auditEnabled ? 1 : 0);
-
-		List<RolePermission> rolePermissions = new ArrayList<RolePermission>();
-		
-		RolePermission rolePermission =  new RolePermission();
-		
-		rolePermissions.add(rolePermission);
-		rolePermission.setAccess(accessTypes);
-		rolePermission.setUsers(users);
-		rolePermission.setGroups(groups);
-		rolePermission.setIpAddress(ipAddresses);
-		
-		policy.setPermissions(rolePermissions);
-		
-		policies.add(policy);
-
-		policyContainer.setAcl(policies);
-
-		return policyContainer;
-	}
-
-	private static Set<String> asSet(String... a) {
-		Set<String> vals = new HashSet<String>();
-		for (String s : a) {
-			vals.add(s);
-		}
-		return vals;
-	}
-
-	private static List<String> asList(String... a) {
-		List<String> vals = new ArrayList<String>();
-		for (String s : a) {
-			vals.add(s);
-		}
-		return vals;
-	}
-	
-	
-	private PolicyContainer readPolicyCache(String lastStoreFileName) {
-		BufferedReader jsonString = null;
-		try {
-			jsonString = new BufferedReader(new FileReader(lastStoredFileName));
-		} catch (FileNotFoundException e) {
-			// TODO Auto-generated catch block
-			e.printStackTrace();
-		}	                		
-    	Gson gson = new GsonBuilder().create();	                    	
-    	PolicyContainer newPolicyContainer = gson.fromJson(jsonString, PolicyContainer.class);	 
-    	return newPolicyContainer;
-	}
-	
-	private String policyToString(PolicyContainer pc) {
-		Gson gson = new GsonBuilder().create() ;
-		String policyAsJson = gson.toJson(policyContainer) ;
-		return policyAsJson;
-	}
-	
-	
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDBTest.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDBTest.java b/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDBTest.java
deleted file mode 100644
index 19023b6..0000000
--- a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDBTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ranger.pdp.hdfs;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.when;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ranger.pdp.hdfs.URLBasedAuthDB;
-import org.apache.ranger.pdp.model.Policy;
-import org.apache.ranger.pdp.model.PolicyContainer;
-import org.apache.ranger.pdp.model.ResourcePath;
-import org.junit.Test;
-
-public class URLBasedAuthDBTest {
-
-	@Test
-	public void testIsAuditLogEnabledByACL_emptyPolicyContainer() {
-
-		// audit can't be enabled if authdb isn't initialized 
-		assertFalse(mAuthDB.isAuditLogEnabledByACL("blah"));
-		
-		// or if the policy container in is null!
-		URLBasedAuthDB spy = spy(mAuthDB);
-		when(spy.getPolicyContainer()).thenReturn(null);
-		assertFalse(mAuthDB.isAuditLogEnabledByACL("blah"));
-		
-		// of if policy container is empty, i.e. has no policies!
-		List<Policy> policies = new ArrayList<Policy>();
-		PolicyContainer policyContainer = mock(PolicyContainer.class);
-		when(policyContainer.getAcl()).thenReturn(policies);
-		when(spy.getPolicyContainer()).thenReturn(policyContainer);
-		assertFalse(mAuthDB.isAuditLogEnabledByACL("blah"));
-		
-		// or if all policies are empty, i.e. no acls!
-		Policy aPolicy = mock(Policy.class);
-		when(aPolicy.getResourceList()).thenReturn(new ArrayList<ResourcePath>());
-		policies.add(aPolicy);
-		when(policyContainer.getAcl()).thenReturn(policies);
-		when(spy.getPolicyContainer()).thenReturn(policyContainer);
-		assertFalse(spy.isAuditLogEnabledByACL("blah"));
-	}
-	
-	private final URLBasedAuthDB mAuthDB = URLBasedAuthDB.getInstance();	
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/ce1808af/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB_IsAuditLogEnabledByACL_PTest.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB_IsAuditLogEnabledByACL_PTest.java b/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB_IsAuditLogEnabledByACL_PTest.java
deleted file mode 100644
index d2dfe96..0000000
--- a/agents-impl/src/test/java/org/apache/ranger/pdp/hdfs/URLBasedAuthDB_IsAuditLogEnabledByACL_PTest.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.ranger.pdp.hdfs;
-
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.when;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.pdp.hdfs.URLBasedAuthDB;
-import org.apache.ranger.pdp.model.Policy;
-import org.apache.ranger.pdp.model.PolicyContainer;
-import org.apache.ranger.pdp.model.ResourcePath;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
-
-@RunWith(Parameterized.class)
-public class URLBasedAuthDB_IsAuditLogEnabledByACL_PTest {
-
-	static class PolicyIs {
-		static final boolean wildcard = true;
-		static final boolean audited = true;
-		static final boolean recursive = true;
-
-		static final boolean notWildcard = false;
-		static final boolean notAudited = false;
-		static final boolean notRecursive = false;
-	}
-	
-	static final class PolicyPath {
-		static final String path1 = "aPath";
-		static final String path1Child1 = PolicyPath.path1 + "/" + "child1";
-		static final String path1Child2 = PolicyPath.path1 + "/" + "child2";
-
-		static final String path2 = "anotherPath";
-	}
-	static final class TestPath {
-		static final String path1 = PolicyPath.path1;
-		static final String beginsWithPath1 = PolicyPath.path1 + "_";
-		static final String path1Child1 = PolicyPath.path1Child1;
-		static final String path1Child2 = PolicyPath.path1Child2;
-		static final String path1GrandChild1 = String.format("%s/%s/%s", path1, path1Child1, "grandChild1");
-		static final String path1GrandChild2 = String.format("%s/%s/%s", path1, path1Child1, "grandChild2");
-
-		static final String path2 = PolicyPath.path2;
-		static final String beginsWithPath2 = PolicyPath.path2 + "_";
-		static final String path2Child1 = PolicyPath.path2 + "/" + "child1";
-		static final String path2Child2 = PolicyPath.path2 + "/" + "child2";
-	}
-
-	static class ExpectedResult {
-		static final class AuditEnabled {
-			static final boolean yes = true;
-			static final boolean no = false;
-		}
-	}
-	
-	static class TestDataIndex {
-		static final int ExpectedResult = 6;
-		static final int Audited = 3;
-		public static final int TestName = 0;
-		public static final int wildCard = 2;
-	}
-	
-	
-	/**
-	 * ASSUMPTION: set of tests passed as such that they require wildcard flag to be set for them to return audit enabled.
-	 * So turn wildcard flag of them off to assert that they no-longer work.  Of course, those that don't work even with wildcard
-	 * should also continue to not work when wildcard is turned off!
-	 */
-	private static List<Object[]> turnWildcardOffForTestsThatRequireWildcard(List<Object[]> tests) {
-		
-		// in the worst case we would generate one test for each existing test
-		List<Object[]> newTests = new ArrayList<Object[]>(tests.size());
-		for (Object[] aTest: tests) {
-			boolean isPolicyWildcard = (Boolean) aTest[TestDataIndex.wildCard];
-			if (isPolicyWildcard == PolicyIs.wildcard) {
-				Object[] newTest = Arrays.copyOf(aTest, aTest.length);
-				// Change the policy of this test so that Audit is disabled at policy level and accordingly change the expected result
-				newTest[TestDataIndex.wildCard] = PolicyIs.notWildcard;
-				newTest[TestDataIndex.ExpectedResult] = ExpectedResult.AuditEnabled.no;
-				// for debugging purposes alter the test description, too
-				String testName = (String) newTest[TestDataIndex.TestName];
-				newTest[TestDataIndex.TestName] = "[Wildcard-ed base test with wildcard flag turned off] " + testName;
-				newTests.add(newTest);
-			}
-		}
-		return newTests;
-	}
-	
-	/**
-	 * wildcard - policy flag says wildcard but the policy path itself does not have any wildcards worth expanding.
-	 * This should work exactly the same as if wildcard was turned off!
-	 */
-	private static List<Object[]> turnWildcardOnForNonWildcardTests(List<Object[]> tests) {
-		
-		// in the worst case we would generate one test for each existing test
-		List<Object[]> newTests = new ArrayList<Object[]>(tests.size());
-		/*
-	 * If a test currently does not have wildcard set on it, then the expectation is that changing the wildcard flag
-	 * to true shouldn't change the result.  ASSUMPTION here, of course, is that "base tests" don't use any
-		 * wild-card characters in their resource paths that would make an otherwise disabled audit to return enabled. 
-		 */
-		for (Object[] aTest: tests) {
-			boolean isPolicyWildcard = (Boolean) aTest[TestDataIndex.wildCard];
-			if (isPolicyWildcard == PolicyIs.notWildcard) {
-				Object[] newTest = Arrays.copyOf(aTest, aTest.length);
-				// Change the policy of this test so that Audit is disabled at policy level and accordingly change the expected result
-				newTest[TestDataIndex.wildCard] = PolicyIs.wildcard;
-				// for debugging purposes alter the test description, too
-				String testName = (String) newTest[TestDataIndex.TestName];
-				newTest[TestDataIndex.TestName] = "[Base test with wildcard enabled] " + testName;
-				newTests.add(newTest);
-			}
-		}
-		return newTests;
-	}
-	
-	/**
-	 * Disable audit on every test that expects the result to be yes, to ensure that the answer is false whenever the policy says that audit is disabled!
-	 */
-	private static List<Object[]> disableAuditForBaseTests(List<Object[]> tests) {
-
-		List<Object[]> newTests = new ArrayList<Object[]>(tests.size());
-		
-		for (Object[] aTest : tests) {
-			boolean expectedResult = (Boolean) aTest[TestDataIndex.ExpectedResult];
-			boolean isPolicyAuditEnabled = (Boolean) aTest[TestDataIndex.Audited];
-			
-			if (expectedResult == ExpectedResult.AuditEnabled.yes 
-					&& isPolicyAuditEnabled == PolicyIs.audited) {
-				Object[] newTest = Arrays.copyOf(aTest, aTest.length);
-				// Change the policy of this test so that Audit is disabled at policy level and accordingly change the expected result
-				newTest[TestDataIndex.Audited] = PolicyIs.notAudited;
-				newTest[TestDataIndex.ExpectedResult] = ExpectedResult.AuditEnabled.no;
-				// for debugging purposes alter the test description, too
-				String testName = (String) newTest[TestDataIndex.TestName];
-				newTest[TestDataIndex.TestName] = "[Base tests with audit disabled] " + testName;
-				newTests.add(newTest);
-			}
-		}
-		
-		return newTests;
-	}
-
-	@Parameters
-	public static Collection<Object[]> data() {
-		Object[][] baseTestData = new Object[][] {
-
-				// no-recursive paths - return true if paths match
-				{"policypath(path1) == testpath(path1) => yes", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.notRecursive, TestPath.path1, ExpectedResult.AuditEnabled.yes},
-				{"policypath(path2) == testpath(path2) => yes", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.notRecursive, TestPath.path2, ExpectedResult.AuditEnabled.yes},
-
-				// no-recursive paths - return false if paths don't match!
-				{"policypath(path1) != testPath(path2) => no", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.notRecursive, TestPath.path2, ExpectedResult.AuditEnabled.no},
-				{"policypath(path2) != testPath(path1) => no", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.notRecursive, TestPath.path1, ExpectedResult.AuditEnabled.no},
-				
-				// recursive path policy - should work at least as well as non-recursive, i.e. match when same and not otherwise!
-				{"recursive, policypath(path1) == testpath(path1)",
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path2) == testpath(path2)", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path1) == testpath(path2)",
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2, ExpectedResult.AuditEnabled.no},
-				{"recursive, policypath(path1) == testpath(path2)",
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1, ExpectedResult.AuditEnabled.no}, 
-
-				// recursive path policy - should match children
-				{"recursive, policypath(path1) == testpath(path1/child1)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1Child1, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path1) == testpath(path1/child2)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1Child2, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path1) == testpath(path1/child1)", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2Child1, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path1) == testpath(path1/child2)", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2Child2, ExpectedResult.AuditEnabled.yes}, 
-
-				// recursive path policy - should match grand children, too!
-				{"recursive, policypath(path1) == testpath(path1/child1/grandChild1)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1GrandChild1, ExpectedResult.AuditEnabled.yes}, 
-				{"recursive, policypath(path1) == testpath(path1/child1/grandChild2)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1GrandChild2, ExpectedResult.AuditEnabled.yes}, 
-
-				// recursive path policy - shouldn't match child in some other directory
-				{"recursive, policypath(path1) == testpath(path1/child1)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2Child1, ExpectedResult.AuditEnabled.no}, 
-				{"recursive, policypath(path1) == testpath(path1/child2)", 
-					PolicyPath.path1, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path2Child2, ExpectedResult.AuditEnabled.no}, 
-				{"recursive, policypath(path1) == testpath(path1/child1)", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1Child1, ExpectedResult.AuditEnabled.no}, 
-				{"recursive, policypath(path1) == testpath(path1/child2)", 
-					PolicyPath.path2, PolicyIs.notWildcard, PolicyIs.audited, PolicyIs.recursive, TestPath.path1Child2, ExpectedResult.AuditEnabled.no}, 
-
-		};
-		
-		Object[][] wildCardTestData = new Object[][] {
-				// Pattern contains exact substring
-				{"Wildcard, Pattern contains substring of tested path - 1", 
-					"aPath*", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath", ExpectedResult.AuditEnabled.yes}, 
-				{"Wildcard, Pattern contains substring of tested path - 2",
-					"*aPath", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath", ExpectedResult.AuditEnabled.yes}, 
-				{"Wildcard, Pattern contains substring of tested path - 3",
-					"aPa*th", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath", ExpectedResult.AuditEnabled.yes}, 
-				{"Wildcard, Pattern contains substring of tested path - 4",
-					"aP*at*h", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath", ExpectedResult.AuditEnabled.yes},
-
-				// Pattern should match
-				{"Wildcard, Pattern should match - 1",
-					"aPath*", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath_", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 2",
-					"aPath*", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aPath_longSuffix", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 3",
-					"*aPath", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "_aPath", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 4",
-					"*aPath", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "longPrefix_aPath", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 5",
-					"*aPath", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "_aPath", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 6",
-					"*aPath", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "longPrefix_aPath", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 5",
-					"a*Path", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "a___Path", ExpectedResult.AuditEnabled.yes},
-				{"Wildcard, Pattern should match - 6",
-					"a*Path", PolicyIs.wildcard, PolicyIs.audited, PolicyIs.recursive, "aMiddlePath", ExpectedResult.AuditEnabled.yes},
-		};
-		
-		// in the worst case all tests have a corresponding audit disabled test 
-		List<Object[]> baseTests = Arrays.asList(baseTestData);
-		List<Object[]> result = new ArrayList<Object[]>(baseTests);
-		
-		// answer is false no matter what if policy is set to not audit
-		List<Object[]> additionalTests = disableAuditForBaseTests(baseTests);
-		result.addAll(additionalTests);
-		
-		// turning wildcard flag on when policy path itself does not have wildcard characters in it shouldn't change the result!
-		additionalTests = turnWildcardOnForNonWildcardTests(baseTests);
-		result.addAll(additionalTests);
-		
-		List<Object[]> wildcardBaseTests = Arrays.asList(wildCardTestData);
-		result.addAll(wildcardBaseTests);
-		
-		additionalTests = turnWildcardOffForTestsThatRequireWildcard(wildcardBaseTests);
-		result.addAll(additionalTests);
-		return result;
-	}
-
-	public URLBasedAuthDB_IsAuditLogEnabledByACL_PTest(String testName, String policyPath, boolean wildCard, boolean audited, boolean recursive, String testPath, boolean expectedResult) {
-		_testName = testName;
-		_policyPath = policyPath;
-		_policyPathWildcard = wildCard;
-		_policyAudited = audited;
-		_policyRecursive = recursive;
-		_testPath = testPath;
-		_expectedResult = expectedResult;
-	}
-	
-	private final String _testName;
-	private final String _policyPath;
-	private final boolean _policyPathWildcard;
-	private final boolean _policyAudited;
-	private final boolean _policyRecursive;
-	private final String _testPath;
-	private final boolean _expectedResult;
-	
-	@Test
-	public void testIsAuditLogEnabledByACL() {
-		
-		if (LOG.isDebugEnabled()) {
-			LOG.debug(String.format("Test: %sPolicy Path: %s, isWildcard: %b, isAudited: %b, isRecursive: %b, TestPath: %s",
-					_testName, _policyPath, _policyPathWildcard, _policyAudited, _policyRecursive, _testPath));
-		}
-
-		// A policy can have several paths, so let's first stuff our path into a collection
-		ResourcePath path = mock(ResourcePath.class);
-		when(path.getPath()).thenReturn(_policyPath);
-		when(path.isWildcardPath()).thenReturn(_policyPathWildcard);
-		List<ResourcePath> resourcePaths = new ArrayList<ResourcePath>();
-		resourcePaths.add(path);
-		
-		// wire it into the policy and set other aspects of the policy
-		Policy aPolicy = mock(Policy.class);
-		when(aPolicy.getResourceList()).thenReturn(resourcePaths);
-		
-		int recursiveIndicator = _policyRecursive ? 1 : 0;
-		when(aPolicy.getRecursiveInd()).thenReturn(recursiveIndicator);
-		
-		int auditedIndicator = _policyAudited ? 1 : 0;
-		when(aPolicy.getAuditInd()).thenReturn(auditedIndicator);
-
-		// a container can have several policies to first we stuff our policy into a container
-		List<Policy> policies = new ArrayList<Policy>();
-		policies.add(aPolicy);
-		// now wire the policy into the container
-		PolicyContainer policyContainer = mock(PolicyContainer.class);
-		when(policyContainer.getAcl()).thenReturn(policies);
-
-		// finally wire the policy container into the authdb
-		URLBasedAuthDB spy = spy(mAuthDB);
-		when(spy.getPolicyContainer()).thenReturn(policyContainer);
-		
-		// assert the result
-		boolean result = spy.isAuditLogEnabledByACL(_testPath);
-		assertThat(_testName, result, is(_expectedResult));
-		if (LOG.isDebugEnabled()) {
-			LOG.debug(String.format(", Expected Result (Audit enabled?): %b Result: %b\n", _expectedResult, result));
-		}
-	}
-
-	private final URLBasedAuthDB mAuthDB = URLBasedAuthDB.getInstance();
-	private static final Log LOG = LogFactory.getLog(URLBasedAuthDB_IsAuditLogEnabledByACL_PTest.class) ;
-}


[6/6] incubator-ranger git commit: RANGER-203: HDFS plugin update to use newly added result.getPolicyId() method.

Posted by ma...@apache.org.
RANGER-203: HDFS plugin update to use newly added result.getPolicyId()
method.

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/d7bf8e09
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/d7bf8e09
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/d7bf8e09

Branch: refs/heads/stack
Commit: d7bf8e09de7c047e3f566c5ce3955ac3a25ab6fe
Parents: 7758ed1
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Mon Jan 26 17:57:32 2015 -0800
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Mon Jan 26 17:57:32 2015 -0800

----------------------------------------------------------------------
 .../hdfs/server/namenode/RangerFSPermissionChecker.java   | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/d7bf8e09/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
index f4e6dc7..4132706 100644
--- a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
+++ b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
@@ -272,12 +272,8 @@ class RangerHdfsAuditHandler extends RangerDefaultAuditHandler {
 
 		RangerAccessRequest request      = result.getAccessRequest();
 		RangerServiceDef    serviceDef   = result.getServiceDef();
-		int                 serviceType  = (serviceDef != null && serviceDef.getId() != null) ? serviceDef.getId().intValue() : -1;
-		String              serviceName  = result.getServiceName();
 		String              resourceType = getResourceName(request.getResource(), serviceDef);
 		String              resourcePath = getResourceValueAsString(request.getResource(), serviceDef);
-		Long                policyId     = (result.getAccessTypeResults() != null && !result.getAccessTypeResults().isEmpty())
-														? result.getAccessTypeResults().values().iterator().next().getPolicyId() : null;
 
 		auditEvent.setUser(request.getUser());
 		auditEvent.setResourcePath(pathToBeValidated);
@@ -287,9 +283,9 @@ class RangerHdfsAuditHandler extends RangerDefaultAuditHandler {
 		auditEvent.setClientIP(request.getClientIPAddress());
 		auditEvent.setEventTime(request.getAccessTime());
 		auditEvent.setAclEnforcer(RangerModuleName);
-		auditEvent.setPolicyId(policyId != null ? policyId.longValue() : -1);
-		auditEvent.setRepositoryType(serviceType);
-		auditEvent.setRepositoryName(serviceName);
+		auditEvent.setPolicyId(result.getPolicyId());
+		auditEvent.setRepositoryType(result.getServiceType());
+		auditEvent.setRepositoryName(result.getServiceName());
 		auditEvent.setResultReason(resourcePath);
 	}
 


[5/6] incubator-ranger git commit: RANGER-203: replaced Hive plugin implementation to use Pluggable-service model.

Posted by ma...@apache.org.
RANGER-203: replaced Hive plugin implementation to use Pluggable-service
model. 

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/7758ed1c
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/7758ed1c
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/7758ed1c

Branch: refs/heads/stack
Commit: 7758ed1cabb2052d1d212bd3f118036dd2f89efb
Parents: 5a50f5f
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Mon Jan 26 17:56:07 2015 -0800
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Mon Jan 26 17:56:07 2015 -0800

----------------------------------------------------------------------
 .../org/apache/ranger/pdp/hive/HiveAuthDB.java  | 306 -------------
 .../apache/ranger/pdp/hive/HiveAuthRule.java    | 222 ---------
 .../pdp/hive/HiveAuthorizationProviderBase.java |  64 ---
 .../ranger/pdp/hive/RangerAuthorizer.java       |  47 --
 .../apache/ranger/pdp/hive/URLBasedAuthDB.java  | 221 ---------
 hive-agent/pom.xml                              |   5 +
 .../hive/RangerHiveAccessContext.java           | 107 -----
 .../hive/RangerHiveAccessVerifier.java          |  29 --
 .../hive/RangerHiveAccessVerifierFactory.java   |  66 ---
 .../hive/RangerHiveObjectAccessInfo.java        | 270 -----------
 .../authorizer/RangerHiveAccessRequest.java     |  92 ++++
 .../hive/authorizer/RangerHiveAuditHandler.java | 177 ++++++++
 .../hive/authorizer/RangerHiveAuthorizer.java   | 452 +++++++++----------
 .../authorizer/RangerHiveAuthorizerBase.java    |   6 -
 .../hive/authorizer/RangerHiveResource.java     | 185 ++++++++
 .../plugin/audit/RangerDefaultAuditHandler.java |   7 +-
 .../plugin/policyengine/RangerAccessResult.java |  37 ++
 17 files changed, 722 insertions(+), 1571 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
deleted file mode 100644
index f9bdedf..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthDB.java
+++ /dev/null
@@ -1,306 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hive;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveObjectType;
-import org.apache.ranger.authorization.utils.StringUtil;
-
-public class HiveAuthDB {
-
-	private static final Log LOG = LogFactory.getLog(HiveAuthDB.class);
-
-	private ArrayList<HiveAuthRule> allRuleList = null;
-	private ArrayList<HiveAuthRule> tblRuleList = null;
-	private ArrayList<HiveAuthRule> colRuleList = null;
-
-	public HiveAuthDB() {
-		this(null) ;
-	}
-
-
-	public HiveAuthDB(ArrayList<HiveAuthRule> aRuleList) {
-		
-		if (aRuleList == null) {
-			aRuleList = new ArrayList<HiveAuthRule>() ;
-		}
-		
-		LOG.info("Number of Rules in the PolicyContainer: " +  ((aRuleList == null) ? 0 : aRuleList.size()) ) ; 
-		
-		allRuleList = new ArrayList<HiveAuthRule>() ;
-		colRuleList = new  ArrayList<HiveAuthRule>();
-		tblRuleList = new  ArrayList<HiveAuthRule>() ;
-		
-		allRuleList = aRuleList ;
-		
-		for (HiveAuthRule rule : aRuleList) {
-			if (rule.isTableRule()) {
-				this.tblRuleList.add(rule);
-			} else {
-				this.colRuleList.add(rule);
-			}
-		}
-		
-	}
-
-	public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) {
-		boolean ret = false;
-
-		if(objAccessInfo.getAccessType() == HiveAccessType.NONE || objAccessInfo.getObjectType() == HiveObjectType.NONE) {
-			return true;
-		}
-		
-		String accessType = objAccessInfo.getAccessType().name();
-
-		switch(objAccessInfo.getObjectType()) {
-			case DATABASE:
-				ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase());
-			break;
-
-			case TABLE:
-			case INDEX:
-			case PARTITION:
-				ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getTable());
-			break;
-
-			case VIEW:
-				ret = isAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getView());
-			break;
-
-			case COLUMN:
-			{
-				String deniedColumn = findDeniedColumn(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getTable(), objAccessInfo.getColumns());
-				
-				ret = StringUtil.isEmpty(deniedColumn);
-				
-				if(! ret) {
-					objAccessInfo.setDeinedObjectName(RangerHiveObjectAccessInfo.getObjectName(objAccessInfo.getDatabase(), objAccessInfo.getTable(), deniedColumn));
-				}
-			}
-			break;
-
-			case FUNCTION:
-				ret = isUDFAccessAllowed(ugi, accessType, objAccessInfo.getDatabase(), objAccessInfo.getFunction());
-			break;
-
-			case URI:
-				// Handled in RangerHiveAuthorizer
-			break;
-
-			case NONE:
-			break;
-		}
-
-		return ret;
-	}
-
-	public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) {
-		boolean ret = false;
-
-		if(   objAccessInfo.getAccessType() == HiveAccessType.NONE
-           || objAccessInfo.getObjectType() == HiveObjectType.NONE
-           || objAccessInfo.getObjectType() == HiveObjectType.URI
-           ) {
-			return false;
-		}
-		
-		String       database = null;
-		String       table    = null;
-		List<String> columns  = null;
-		boolean      isUDF    = false;
-		
-		switch(objAccessInfo.getObjectType()) {
-			case DATABASE:
-				database = objAccessInfo.getDatabase();
-			break;
-
-			case TABLE:
-			case INDEX:
-			case PARTITION:
-				database = objAccessInfo.getDatabase();
-				table    = objAccessInfo.getTable();
-			break;
-
-			case VIEW:
-				database = objAccessInfo.getDatabase();
-				table    = objAccessInfo.getView();
-			break;
-
-			case COLUMN:
-				database = objAccessInfo.getDatabase();
-				table    = objAccessInfo.getTable();
-				columns  = objAccessInfo.getColumns();
-			break;
-
-			case FUNCTION:
-				database = objAccessInfo.getDatabase();
-				table    = objAccessInfo.getFunction();
-				isUDF    = true;
-			break;
-
-			case NONE:
-			case URI:
-			break;
-		}
-		
-		if(StringUtil.isEmpty(columns)) {
-			for (HiveAuthRule rule : allRuleList) {
-				if(isUDF != rule.isUdf()) {
-					continue;
-				}
-
-				if (rule.isTableMatch(database, table)) {
-					ret = rule.isAudited() ;
-
-					if (ret) {
-						if (LOG.isDebugEnabled()) {
-							LOG.debug("isAudited(database=" + database + ", table=" + table + ", columns=" + StringUtil.toString(columns) + ") => [" + ret + "] as matched for rule: " + rule);
-						}
-
-						break ;
-					}
-				}
-			}
-		} else {
-			// is audit enabled for any one column being accessed?
-			for(String colName : columns) {
-				for (HiveAuthRule rule : allRuleList) {
-					if(isUDF != rule.isUdf()) {
-						continue;
-					}
-
-					ret = rule.isMatched(database, table, colName) && rule.isAudited();
-
-					if (ret) {
-						if (LOG.isDebugEnabled()) {
-							LOG.debug("isAudited(database=" + database + ", table=" + table + ", columns=" + StringUtil.toString(columns) + ") => [" + ret + "] as matched for rule: " + rule);
-						}
-
-						break ;
-					}
-				}
-				
-				if(ret) {
-					break;
-				}
-			}
-		}
-
-		return ret ;
-	}
-
-	private boolean isAccessAllowed(UserGroupInformation ugi, String accessType, String database) {
-		boolean ret = false;
-
-		for (HiveAuthRule rule : allRuleList) {
-			ret = rule.isMatched(database, ugi.getShortUserName(), ugi.getGroupNames(), accessType);
-
-			if(ret) {
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ") => [" + ret + "] as matched for rule: " + rule);
-				}
-
-				break;
-			}
-		}
-
-		return ret;
-	}
-
-	private boolean isAccessAllowed(UserGroupInformation ugi, String accessType, String database, String tableOrView) {
-		boolean ret = false;
-
-		for (HiveAuthRule rule : tblRuleList) {
-			ret = rule.isMatched(database, tableOrView, ugi.getShortUserName(), ugi.getGroupNames(), accessType);
-
-			if(ret) {
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ") => [" + ret + "] as matched for rule: " + rule);
-				}
-
-				break;
-			}
-		}
-
-		return ret;
-	}
-
-	private String findDeniedColumn(UserGroupInformation ugi, String accessType, String database, String tableOrView, List<String> columns) {
-		String deinedColumn = null;
-
-		boolean isAllowed = isAccessAllowed(ugi, accessType, database, tableOrView); // check if access is allowed at the table level
-
-		if(!isAllowed && !StringUtil.isEmpty(columns)) {
-			for(String column : columns) {
-				for (HiveAuthRule rule : colRuleList) {
-					isAllowed = rule.isMatched(database, tableOrView, column, ugi.getShortUserName(), ugi.getGroupNames(), accessType);
-
-					if(isAllowed) {
-						if (LOG.isDebugEnabled()) {
-							LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ", column=" + column + ") => [" + isAllowed + "] as matched for rule: " + rule);
-						}
-
-						break;
-					}
-				}
-				
-				if(!isAllowed) {
-					deinedColumn = column;
-
-					if (LOG.isDebugEnabled()) {
-						LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", tableOrView=" + tableOrView + ", column=" + column + ") => [" + isAllowed + "]");
-					}
-					break;
-				}
-			}
-		}
-
-		return deinedColumn;
-	}
-
-	private boolean isUDFAccessAllowed(UserGroupInformation ugi, String accessType, String database, String udfName) {
-		boolean ret = false;
-
-		for (HiveAuthRule rule : tblRuleList) {
-			if(! rule.isUdf()) {
-				continue;
-			}
-
-			ret = rule.isMatched(database, udfName, ugi.getShortUserName(), ugi.getGroupNames(), accessType);
-
-			if(ret) {
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("isAccessAllowed(user=" + ugi.getShortUserName() + ", groups=" + StringUtil.toString(ugi.getGroupNames()) + ", accessType=" + accessType + ", database=" + database + ", udfName=" + udfName + ") => [" + ret + "] as matched for rule: " + rule);
-				}
-
-				break;
-			}
-		}
-
-		return ret;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java
deleted file mode 100644
index 21bd7c1..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthRule.java
+++ /dev/null
@@ -1,222 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType;
-import org.apache.ranger.authorization.hive.constants.RangerHiveConstants;
-import org.apache.ranger.authorization.utils.StringUtil;
-
-
-public class HiveAuthRule {
-	
-	private static final Log LOG = LogFactory.getLog(HiveAuthRule.class) ;
-
-	public static final String WILDCARD_OBJECT = ".*" ;
-	
-	private String databaseName;     
-	private String tableName;  
-	private String columnName;
-	private String accessType;
-	private String group;
-	private String user;
-	private boolean tableRule      = false;
-	private boolean allGranted     = false;
-	private boolean udf            = false;
-	private boolean tableExcluded  = false;
-	private boolean columnExcluded = false;
-	private boolean audited        = false;
-	private boolean encrypted      = false;
-
-	public HiveAuthRule(String dbName, String tableName, String colName, String permission, String user, String group) {
-		this(false, dbName,tableName,colName,permission,user,group, false, false) ;
-	}
-	
-	public HiveAuthRule(boolean udfInd,  String dbName, String tableName, String colName, String permission, String user, String group, boolean tableExclusionFlag, boolean columnExclusionFlag) {
-		this.udf            = udfInd ;
-		this.databaseName   = StringUtil.toLower(dbName);
-		this.tableName      = StringUtil.toLower(tableName);
-		this.columnName     = StringUtil.toLower(colName);
-		this.accessType     = permission ;
-		this.user           = user;
-		this.group          = group ;
-		this.tableExcluded  = tableExclusionFlag ;
-		this.columnExcluded = columnExclusionFlag ;
-
-		this.allGranted = StringUtil.equalsIgnoreCase(HiveAccessType.ALL.name(), accessType);
-
-		tableRule = StringUtil.isEmpty(columnName) || WILDCARD_OBJECT.matches(columnName) ;
-	}
-	
-	@Override
-	public String toString() {
-		return "db:" + databaseName + ", table: " + tableName + ", columnName: " + columnName + ", accessType: " + accessType + ",user: " + user +  ", group: " + group + ",isTable:" + tableRule + ",audited:"  + audited + ",encrypted:" + encrypted ;
-	}
-
-	public boolean isMatched(String user, String[] groups, String accessType) {
-		String dbName  = null;
-		String tblName = null;
-		String colName = null;
-
-		return isMatched(dbName, tblName, colName, user, groups, accessType) ;
-	}
-
-	public boolean isMatched(String dbName, String user, String[] groups, String accessType) {
-		String tblName = null;
-		String colName = null;
-
-		return isMatched(dbName, tblName, colName, user, groups, accessType) ;
-	}
-	
-	public boolean isMatched(String dbName, String tblName, String user, String[] groups, String accessType) {
-		String colName = null;
-
-		return isMatched(dbName, tblName, colName, user, groups, accessType) ;
-	}
-
-	public boolean isMatched(String dbName, String tblName, String colName,  String user, String[] groups, String accessType) {
-		boolean ret = isMatched(dbName, tblName, colName);
-
-		if(ret) {
-			// does accessType match?
-			ret = StringUtil.equalsIgnoreCase(accessType,  this.accessType);
-
-			if(! ret && !StringUtil.equalsIgnoreCase(accessType, HiveAccessType.ADMIN.name())) {
-				ret = this.isAllGranted() || StringUtil.equalsIgnoreCase(accessType, "USE");
-			}
-
-			if(ret) {
-				// does user/group match?
-				ret = StringUtil.equals(user, this.user) ||
-				      StringUtil.equals(RangerHiveConstants.PUBLIC_ACCESS_ROLE, this.group) ||
-				      StringUtil.contains(groups, this.group);
-			}
-		}
-
-		if(LOG.isDebugEnabled()) {
-			LOG.debug("isMatched(db=" + dbName + ", table=" + tblName + ", col=" + colName + ", user=" + user + ", groups=" + StringUtil.toString(groups) + ", accessType=" + accessType + ") => rule[" + this.databaseName + ":" +  this.tableName + ":" + this.columnName + ":" + this.user + ":" + this.group + ":" + this.accessType + "] returns [" + ret + "]");
-		}
-
-		return ret ;
-	}
-
-	public boolean isMatched(String dbName, String tblName, String colName) {
-		boolean ret = isTableMatch(dbName, tblName);
-
-		if (ret) {
-	 		colName = StringUtil.toLower(colName);
-
-	 		if (colName != null) {
-				ret = colName.matches(this.columnName);
-
-				if (columnExcluded) {
-					ret = (! ret) ;
-				}
-			}
-		}
-
-		if(LOG.isDebugEnabled()) {
-			LOG.debug("isMatched(db=" + dbName + ", table=" + tblName + ", col=" + colName + ") => rule[" + this.databaseName + ":" +  this.tableName + ":" + this.columnName + "] returns [" + ret + "]");
-		}
-
-		return ret ;
-	}
-
-	public boolean isTableMatch(String dbName, String tblName) {
-		boolean ret = isDBMatch(dbName);
-
-		if(ret) {
-			tblName = StringUtil.toLower(tblName);
-
-			if(tblName != null) {
-				ret = tblName.matches(this.tableName);
-
-				if(tableExcluded) {
-					ret = !ret;
-				}
-			}
-		}
-		
-		return ret;
-	}
-
-	public boolean isDBMatch(String dbName) {
-		boolean ret = false;
-		
-		dbName = StringUtil.toLower(dbName);
-		
-		ret = dbName == null || dbName.matches(this.databaseName);
-		
-		return ret;
-	}
-
-	public String getDbName() {
-		return databaseName;
-	}
-
-	public String getTableName() {
-		return tableName;
-	}
-
-	public String getColumnName() {
-		return columnName;
-	}
-
-	public String getAccessType() {
-		return accessType;
-	}
-	
-	public String getUser() {
-		return user;
-	}
-
-	public String getGroup() {
-		return group;
-	}
-
-	public boolean isTableRule() {
-		return tableRule;
-	}
-
-	public boolean isAllGranted() {
-		return allGranted ;
-	}
-
-	public boolean isUdf() {
-		return udf;
-	}
-
-	public boolean isAudited() {
-		return audited;
-	}
-
-	public void setAudited(boolean audited) {
-		this.audited = audited;
-	}
-
-	public boolean isEncrypted() {
-		return encrypted;
-	}
-
-	public void setEncrypted(boolean encrypted) {
-		this.encrypted = encrypted;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java
deleted file mode 100644
index 894d2df..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/HiveAuthorizationProviderBase.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hive;
-
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo;
-
-public class HiveAuthorizationProviderBase implements RangerHiveAccessVerifier {
-
-	private static final Log LOG = LogFactory.getLog(HiveAuthorizationProviderBase.class);
-
-	protected HiveAuthDB authDB = new HiveAuthDB()  ;
-
-	
-	public HiveAuthDB getAuthDB() {
-		return authDB ;
-	}
-
-	@Override
-	public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) {
-		HiveAuthDB ldb = authDB ;
-
-		if (ldb == null) {
-			throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ;
-		}
-		
-		boolean ret = ldb.isAccessAllowed(ugi, objAccessInfo);
-		
-		return ret;
-	}
-
-	@Override
-	public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) {
-		HiveAuthDB ldb = authDB ;
-
-		if (ldb == null) {
-			throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ;
-		}
-
-		return ldb.isAudited(objAccessInfo) ;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java
deleted file mode 100644
index fc4291c..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/RangerAuthorizer.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.pdp.hive;
-
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo;
-
-public class RangerAuthorizer implements RangerHiveAccessVerifier {
-	
-	private RangerHiveAccessVerifier authDB = URLBasedAuthDB.getInstance() ;
-	
-
-	@Override
-	public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) {
-		if (authDB == null) {
-			throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ;
-		}
-		return authDB.isAccessAllowed(ugi, objAccessInfo);
-	}
-
-	@Override
-	public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) {
-		if (authDB == null) {
-			throw new AuthorizationException("No Authorization Agent is available for AuthorizationCheck") ;
-		}
-		return authDB.isAudited(objAccessInfo) ;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java b/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java
deleted file mode 100644
index 9de2bf4..0000000
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/hive/URLBasedAuthDB.java
+++ /dev/null
@@ -1,221 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.pdp.hive;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
-import org.apache.ranger.pdp.config.PolicyChangeListener;
-import org.apache.ranger.pdp.config.PolicyRefresher;
-import org.apache.ranger.pdp.constants.RangerConstants;
-import org.apache.ranger.pdp.model.Policy;
-import org.apache.ranger.pdp.model.PolicyContainer;
-import org.apache.ranger.pdp.model.RolePermission;
-
-public class URLBasedAuthDB extends HiveAuthorizationProviderBase implements PolicyChangeListener {
-	
-	private static final Log LOG = LogFactory.getLog(URLBasedAuthDB.class) ;
-		
-	private static URLBasedAuthDB me = null ;
-	
-	private PolicyContainer policyContainer = null ;
-	
-	private PolicyRefresher refresher = null ;
-	
-
-	public static URLBasedAuthDB getInstance() {
-		if (me == null) {
-			synchronized(URLBasedAuthDB.class) {
-				URLBasedAuthDB temp = me ;
-				if (temp == null) {
-					me = new URLBasedAuthDB() ;
-					me.init() ;
-				}
-			}
-		}
-		return me ;
-	}
-	
-	private URLBasedAuthDB() {
-		String url 			 = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_URL_PROP);
-		long  refreshInMilli = RangerConfiguration.getInstance().getLong(
-				RangerConstants.RANGER_HIVE_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_PROP ,
-				RangerConstants.RANGER_HIVE_POLICYMGR_URL_RELOAD_INTERVAL_IN_MILLIS_DEFAULT);
-		
-		String lastStoredFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_LAST_SAVED_POLICY_FILE_PROP) ;
-		
-		String sslConfigFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_SSL_CONFIG_FILE_PROP) ;
-		refresher = new PolicyRefresher(url, refreshInMilli,sslConfigFileName,lastStoredFileName) ;
-		
-		String saveAsFileName = RangerConfiguration.getInstance().get(RangerConstants.RANGER_HIVE_POLICYMGR_URL_SAVE_FILE_PROP) ;
-		if (saveAsFileName != null) {
-			refresher.setSaveAsFileName(saveAsFileName) ;
-		}
-		
-		if (lastStoredFileName != null) {
-			refresher.setLastStoredFileName(lastStoredFileName);
-		}	
-
-	}
-	
-	private void init() {
-		refresher.setPolicyChangeListener(this);
-	}
-	
-	public PolicyContainer getPolicyContainer() {
-		return policyContainer;
-	}
-
-	@Override
-	public void OnPolicyChange(PolicyContainer policyContainer) {
-
-		LOG.debug("OnPolicyChange() has been called with new PolicyContainer .....") ;
-		
-		try {
-			
-			ArrayList<HiveAuthRule> ruleListTemp = new ArrayList<HiveAuthRule>();
-				
-			this.policyContainer = policyContainer;
-	
-			if (LOG.isDebugEnabled()) {
-				LOG.debug("Number of acl found (before isEnabled check): " +  ( policyContainer.getAcl() == null ? 0 :  policyContainer.getAcl().size() ) );
-			}
-			
-			for(Policy acl : policyContainer.getAcl()) {
-				
-				if (! acl.isEnabled()) {
-					LOG.debug("Diabled acl found [" + acl + "]. Skipping this acl ...") ;
-					continue ;
-				}
-				
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("Number of database found in acl [" + acl + "] " +  ( acl.getDatabaseList() == null ? 0 :  acl.getDatabaseList().size() ) );
-					LOG.debug("Number of Tables found in acl [" + acl + "] " +  ( acl.getTableList() == null ? 0 :  acl.getTableList().size() ) );
-					LOG.debug("Number of Columns found in acl [" + acl + "] " +  ( acl.getColumnList()== null ? 0 :  acl.getColumnList().size() ) );
-				}
-
-				boolean isUDF = false ;
-				
-				List<String> dbList = new ArrayList<String>() ;
-				String dbs = replaceFileBasedRegEx(acl.getDatabases()) ;
-				dbList.add(getRegExFormatted(dbs)) ;
-				
-				List<String> tableList = new ArrayList<String>() ;
-				String udfs   = acl.getUdfs() ;
-				if (udfs != null) {
-					isUDF = true ;
-					dbList.clear(); 
-					dbList.add(HiveAuthRule.WILDCARD_OBJECT) ;
-					tableList.clear(); 
-					udfs  = replaceFileBasedRegEx(udfs) ;
-					tableList.add(getRegExFormatted(udfs)) ;
-				}
-				else {
-					String tables = replaceFileBasedRegEx(acl.getTables()) ;
-					tableList.add(getRegExFormatted(tables)) ;
-				}
-				
-				List<String> columnList = new ArrayList<String>() ;
-				String columns = replaceFileBasedRegEx(acl.getColumns()) ;
-				columnList.add(getRegExFormatted(columns)) ;
-
-
-				boolean isAudited = (acl.getAuditInd() == 1) ;
-				
-				boolean isEncrypted = (acl.getEncryptInd() == 1) ;
-
-				for(String db : dbList)  {
-					
-					for(String table : tableList) {
-						
-						for(String col : columnList) {
-							
-							for(RolePermission rp : acl.getPermissions()) {
-								for (String accessLevel : rp.getAccess() ) {
-									for (String group : rp.getGroups()) {
-										HiveAuthRule rule = new HiveAuthRule(isUDF, db, table, col, accessLevel.toLowerCase(), null, group, acl.isTableSelectionExcluded(), acl.isColumnSelectionExcluded());
-										rule.setAudited(isAudited);
-										rule.setEncrypted(isEncrypted);
-										LOG.debug("Adding rule [" + rule + "] to the authdb.");
-										ruleListTemp.add(rule);
-									}
-									for (String user : rp.getUsers()) {
-										HiveAuthRule rule = new HiveAuthRule(isUDF, db, table, col, accessLevel.toLowerCase(), user, null,acl.isTableSelectionExcluded(), acl.isColumnSelectionExcluded());
-										rule.setAudited(isAudited);
-										rule.setEncrypted(isEncrypted);
-										LOG.debug("Adding rule [" + rule + "] to the authdb.");
-										ruleListTemp.add(rule);
-									}
-								}
-							}
-							
-							
-						}
-					}
-				}
-			}
-			HiveAuthDB authDBTemp = new HiveAuthDB(ruleListTemp);
-			authDB = authDBTemp;
-		}
-		catch(Throwable t) {
-			LOG.error("OnPolicyChange has failed with an exception", t);
-		}
-	}
-	
-	public static String getRegExFormatted(String userEnteredStr) {
-		
-		if (userEnteredStr == null || userEnteredStr.trim().length() == 0) {
-			return HiveAuthRule.WILDCARD_OBJECT ;
-		}
-
-		StringBuilder sb = new StringBuilder() ;
-
-		for(String s : userEnteredStr.split(",")) {
-			if (sb.length() == 0) {
-				sb.append("(") ;
-			}
-			else {
-				sb.append("|") ;
-			}
-			sb.append(s.trim()) ;
-		}
-		
-		if (sb.length() > 0) {
-			sb.append(")") ;
-		}
-		
-		return sb.toString() ;
-	}
-	
-	
-	public static String replaceFileBasedRegEx(String userEnteredStr) {
-		if (userEnteredStr != null) {
-			userEnteredStr = userEnteredStr.replaceAll("\\.", "\\.")
-												.replaceAll("\\?", "\\.") 
-												.replaceAll("\\*", ".*") ;
-		}
-		return userEnteredStr ;
-	}
-	
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/pom.xml
----------------------------------------------------------------------
diff --git a/hive-agent/pom.xml b/hive-agent/pom.xml
index 1b19025..c6d41be 100644
--- a/hive-agent/pom.xml
+++ b/hive-agent/pom.xml
@@ -108,5 +108,10 @@
       <artifactId>ranger-plugins-audit</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+    	<groupId>org.apache.ranger</groupId>
+    	<artifactId>plugin-common</artifactId>
+    	<version>${project.version}</version>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java
deleted file mode 100644
index 6c0a2b0..0000000
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessContext.java
+++ /dev/null
@@ -1,107 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.authorization.hive;
-
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
-
-public class RangerHiveAccessContext {
-	private String mClientIpAddress;
-	private String mClientType;
-	private String mCommandString;
-	private String mSessionString;
-	
-	public RangerHiveAccessContext(HiveAuthzContext context, HiveAuthzSessionContext sessionContext) {
-		if(context != null) {
-			mClientIpAddress = context.getIpAddress();
-			mCommandString   = context.getCommandString();
-		}
-		
-		if(sessionContext != null) {
-			mClientType      = sessionContext.getClientType().name();
-			mSessionString   = sessionContext.getSessionString();
-		}
-	}
-
-	public String getClientIpAddress() {
-		return mClientIpAddress;
-	}
-
-	public void setClientIpAddress(String clientIpAddress) {
-		this.mClientIpAddress = clientIpAddress;
-	}
-
-	public String getClientType() {
-		return mClientType;
-	}
-
-	public void setClientType(String clientType) {
-		this.mClientType = clientType;
-	}
-
-	public String getCommandString() {
-		return mCommandString;
-	}
-
-	public void setCommandString(String commandString) {
-		this.mCommandString = commandString;
-	}
-
-	public String getSessionString() {
-		return mSessionString;
-	}
-
-	public void setSessionString(String sessionString) {
-		this.mSessionString = sessionString;
-	}
-	
-	@Override
-	public boolean equals(Object obj) {
-		if (obj == null) {
-			return false;
-		}
-		if (obj == this) {
-			return true;
-		}
-		if (obj.getClass() != getClass()) {
-			return false;
-		}
-		RangerHiveAccessContext that = (RangerHiveAccessContext) obj;
-		return new EqualsBuilder()
-				.appendSuper(super.equals(obj))
-				.append(mClientIpAddress, that.mClientIpAddress)
-				.append(mClientType, that.mClientType)
-				.append(mCommandString, that.mCommandString)
-				.append(mSessionString, that.mSessionString).isEquals();
-	}
-	
-	@Override
-	public int hashCode() {
-		return new HashCodeBuilder(31, 37)
-				.appendSuper(41)
-				.append(mClientIpAddress)
-				.append(mClientType)
-				.append(mCommandString)
-				.append(mSessionString)
-				.toHashCode();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java
deleted file mode 100644
index ef4ad56..0000000
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifier.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.authorization.hive;
-
-import org.apache.hadoop.security.UserGroupInformation;
-
-
-public interface RangerHiveAccessVerifier {
-	public boolean isAccessAllowed(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo) ;
-	
-	public boolean isAudited(RangerHiveObjectAccessInfo objAccessInfo) ;
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java
deleted file mode 100644
index f02bfe8..0000000
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveAccessVerifierFactory.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.authorization.hive;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
-import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
-
-public class RangerHiveAccessVerifierFactory {
-
-	private static final Log LOG = LogFactory.getLog(RangerHiveAccessVerifierFactory.class) ;
-
-	private static RangerHiveAccessVerifier hiveAccessVerififer = null ;
-	
-	public static RangerHiveAccessVerifier getInstance() {
-		if (hiveAccessVerififer == null) {
-			synchronized(RangerHiveAccessVerifierFactory.class) {
-				RangerHiveAccessVerifier temp = hiveAccessVerififer ;
-				if (temp == null) {
-					String hiveAccessVerifierClassName = RangerConfiguration.getInstance().get(RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_PROP, RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE ) ;
-
-					if (hiveAccessVerifierClassName != null) {
-						LOG.info("Hive Access Verification class [" + hiveAccessVerifierClassName + "] - Being built");
-						try {
-							hiveAccessVerififer = (RangerHiveAccessVerifier) (Class.forName(hiveAccessVerifierClassName).newInstance()) ;
-							LOG.info("Created a new instance of class: [" + hiveAccessVerifierClassName + "] for Hive Access verification.");
-						} catch (InstantiationException e) {
-							LOG.error("Unable to create HiveAccess Verifier: [" +  hiveAccessVerifierClassName + "]", e);
-						} catch (IllegalAccessException e) {
-							LOG.error("Unable to create HiveAccess Verifier: [" +  hiveAccessVerifierClassName + "]", e);
-						} catch (ClassNotFoundException e) {
-							LOG.error("Unable to create HiveAccess Verifier: [" +  hiveAccessVerifierClassName + "]", e);
-						} catch (Throwable t) {
-							LOG.error("Unable to create HiveAccess Verifier: [" +  hiveAccessVerifierClassName + "]", t);
-						}
-						finally {
-							LOG.info("Created a new instance of class: [" + hiveAccessVerifierClassName + "] for Hive Access verification. (" + hiveAccessVerififer + ")");
-						}
-					}
-				}
-				else {
-					LOG.error("Unable to obtain hiveAccessVerifier [" +  RangerHadoopConstants.HIVE_ACCESS_VERIFIER_CLASS_NAME_PROP + "]");
-				}
-			}
-		}
-		return hiveAccessVerififer ;
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java
deleted file mode 100644
index 61b45e2..0000000
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/RangerHiveObjectAccessInfo.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.authorization.hive;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.ranger.authorization.utils.StringUtil;
-
-public class RangerHiveObjectAccessInfo {
-	public enum HiveObjectType { NONE, DATABASE, TABLE, VIEW, PARTITION, INDEX, COLUMN, FUNCTION, URI };
-	public enum HiveAccessType { NONE, CREATE, ALTER, DROP, INDEX, LOCK, SELECT, UPDATE, USE, ALL, ADMIN };
-
-	private String              mOperType         = null;
-	private RangerHiveAccessContext mContext          = null;
-	private HiveAccessType      mAccessType       = HiveAccessType.NONE;
-	private HiveObjectType      mObjectType       = HiveObjectType.NONE;
-	private String              mDatabase         = null;
-	private String              mTable            = null;
-	private String              mView             = null;
-	private String              mPartition        = null;
-	private String              mIndex            = null;
-	private List<String>        mColumns          = null;
-	private String              mFunction         = null;
-	private String              mUri              = null;
-	private String              mDeniedObjectName = null;
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName) {
-		this(operType, context, accessType, dbName, null, HiveObjectType.DATABASE, dbName);
-	}
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblName) {
-		this(operType, context, accessType, dbName, tblName, HiveObjectType.TABLE, tblName);
-	}
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, HiveObjectType objType, String objName) {
-		this(operType, context, accessType, dbName, null, objType, objName);
-	}
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, HiveObjectType objType, String objName) {
-		this(operType, context, accessType, null, null, objType, objName);
-	}
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblOrViewName, List<String> columns) {
-		mOperType    = operType;
-		mContext     = context;
-		mAccessType  = accessType;
-		mObjectType  = HiveObjectType.COLUMN;
-		mDatabase    = dbName;
-		mTable       = tblOrViewName;
-		mView        = tblOrViewName;
-		mColumns     = columns;
-	}
-
-	public RangerHiveObjectAccessInfo(String operType, RangerHiveAccessContext context, HiveAccessType accessType, String dbName, String tblName, HiveObjectType objType, String objName) {
-		mOperType    = operType;
-		mContext     = context;
-		mAccessType  = accessType;
-		mObjectType  = objType;
-		mDatabase    = dbName;
-		mTable       = tblName;
-		mView        = tblName;
-
-		if(objName != null && ! objName.trim().isEmpty()) {
-			switch(objType) {
-				case DATABASE:
-					mDatabase = objName;
-				break;
-
-				case TABLE:
-					mTable = objName;
-				break;
-
-				case VIEW:
-					mView = objName;
-				break;
-
-				case PARTITION:
-					mPartition = objName;
-				break;
-
-				case INDEX:
-					mIndex = objName;
-				break;
-
-				case COLUMN:
-					mColumns = new ArrayList<String>();
-					mColumns.add(objName);
-				break;
-
-				case FUNCTION:
-					mFunction = objName;
-				break;
-
-				case URI:
-					mUri = objName;
-				break;
-
-				case NONE:
-				break;
-			}
-		}
-	}
-
-	public String getOperType() {
-		return mOperType;
-	}
-
-	public RangerHiveAccessContext getContext() {
-		return mContext;
-	}
-
-	public HiveAccessType getAccessType() {
-		return mAccessType;
-	}
-
-	public HiveObjectType getObjectType() {
-		return mObjectType;
-	}
-
-	public String getDatabase() {
-		return mDatabase;
-	}
-
-	public String getTable() {
-		return mTable;
-	}
-
-	public String getView() {
-		return mView;
-	}
-
-	public String getPartition() { 
-		return mPartition;
-	}
-
-	public String getIndex() {
-		return mIndex;
-	}
-
-	public List<String> getColumns() {
-		return mColumns;
-	}
-
-	public String getFunction() {
-		return mFunction;
-	}
-
-	public String getUri() {
-		return mUri;
-	}
-
-	public void setDeinedObjectName(String deniedObjectName) {
-		mDeniedObjectName = deniedObjectName;
-	}
-
-	public String getDeinedObjectName() {
-		return mDeniedObjectName;
-	}
-
-	public String getObjectName() {
-        String objName = null;
-
-        if(this.mObjectType == HiveObjectType.URI) {
-            objName = mUri;
-        } else {
-            String tblName = null;
-            String colName = null;
-
-            if(! StringUtil.isEmpty(mTable))
-                tblName = mTable;
-            else if(! StringUtil.isEmpty(mView))
-                tblName = mView;
-            else if(! StringUtil.isEmpty(mFunction))
-                tblName = mFunction;
-
-            if(! StringUtil.isEmpty(mColumns))
-                colName = StringUtil.toString(mColumns);
-            else if(! StringUtil.isEmpty(mIndex))
-                colName = mIndex;
-
-            objName = getObjectName(mDatabase, tblName, colName);
-        }
-
-		return objName;
-	}
-	
-	public static String getObjectName(String dbName, String tblName, String colName) {
-		String objName = StringUtil.isEmpty(dbName) ? "" : dbName;
-		
-		if(!StringUtil.isEmpty(tblName)) {
-			objName += ("/" + tblName);
-			
-			if(!StringUtil.isEmpty(colName)) {
-				objName += ("/" + colName);
-			}
-		}
-
-		return objName;
-	}
-
-	@Override
-	public boolean equals(Object obj) {
-		if (obj == null) {
-			return false;
-		}
-		if (obj == this) {
-			return true;
-		}
-		if (obj.getClass() != getClass()) {
-			return false;
-		}
-		RangerHiveObjectAccessInfo that = (RangerHiveObjectAccessInfo) obj;
-		return new EqualsBuilder()
-				.appendSuper(super.equals(obj))
-				.append(mAccessType, that.mAccessType)
-				.append(mColumns, that.mColumns)
-				.append(mContext, that.mContext)
-				.append(mDatabase, that.mDatabase)
-				.append(mDeniedObjectName, that.mDeniedObjectName)
-				.append(mFunction, that.mFunction)
-				.append(mIndex, that.mIndex)
-				.append(mObjectType, that.mObjectType)
-				.append(mOperType, that.mOperType)
-				.append(mPartition, that.mPartition)
-				.append(mTable, that.mTable)
-				.append(mUri, that.mUri)
-				.append(mView, that.mView)
-				.isEquals();
-	}
-	
-	@Override
-	public int hashCode() {
-		return new HashCodeBuilder(37, 41)
-		.appendSuper(43)
-		.append(mAccessType)
-		.append(mColumns)
-		.append(mContext)
-		.append(mDatabase)
-		.append(mDeniedObjectName)
-		.append(mFunction)
-		.append(mIndex)
-		.append(mObjectType)
-		.append(mOperType)
-		.append(mPartition)
-		.append(mTable)
-		.append(mUri)
-		.append(mView)
-		.toHashCode();
-	}
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java
new file mode 100644
index 0000000..cb35eac
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAccessRequest.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.authorization.hive.authorizer;
+
+import java.util.Set;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.ranger.authorization.utils.StringUtil;
+import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl;
+import org.apache.ranger.plugin.policyengine.RangerPolicyEngine;
+
+
+public class RangerHiveAccessRequest extends RangerAccessRequestImpl {
+	private HiveAccessType accessType = HiveAccessType.NONE;
+
+	public RangerHiveAccessRequest() {
+		super();
+	}
+
+	public RangerHiveAccessRequest(RangerHiveResource      resource,
+								   String                  user,
+								   Set<String>             userGroups,
+								   HiveOperationType       hiveOpType,
+								   HiveAccessType          accessType,
+								   HiveAuthzContext        context,
+								   HiveAuthzSessionContext sessionContext) {
+		this.setResource(resource);
+		this.setUser(user);
+		this.setUserGroups(userGroups);
+		this.setAccessTime(StringUtil.getUTCDate());
+		this.setAction(hiveOpType.name());
+		
+		if(context != null) {
+			this.setClientIPAddress(context.getIpAddress());
+			this.setRequestData(context.getCommandString());
+		}
+		
+		if(sessionContext != null) {
+			this.setClientType(sessionContext.getClientType() == null ? null : sessionContext.getClientType().toString());
+			this.setSessionId(sessionContext.getSessionString());
+		}
+
+		this.accessType = accessType;
+		
+		if(accessType == HiveAccessType.USE) {
+			this.setAccessType(RangerPolicyEngine.ANY_ACCESS);
+		} else {
+			this.setAccessType(accessType.toString().toLowerCase());
+		}
+	}
+
+	public HiveAccessType getAccessType() {
+		return accessType;
+	}
+
+	public RangerHiveAccessRequest copy() {
+		RangerHiveAccessRequest ret = new RangerHiveAccessRequest();
+
+		ret.setResource(getResource());
+		ret.setAccessTypes(getAccessTypes());
+		ret.setUser(getUser());
+		ret.setUserGroups(getUserGroups());
+		ret.setAccessTime(getAccessTime());
+		ret.setAction(getAction());
+		ret.setClientIPAddress(getClientIPAddress());
+		ret.setRequestData(getRequestData());
+		ret.setClientType(getClientType());
+		ret.setSessionId(getSessionId());
+		ret.accessType = accessType;
+
+		return ret;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java
new file mode 100644
index 0000000..e24c094
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuditHandler.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.authorization.hive.authorizer;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ranger.audit.model.AuthzAuditEvent;
+import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
+import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
+import org.apache.ranger.authorization.utils.StringUtil;
+import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler;
+import org.apache.ranger.plugin.policyengine.RangerAccessResult;
+import org.apache.ranger.plugin.policyengine.RangerAccessResult.Result;
+
+public class RangerHiveAuditHandler extends RangerDefaultAuditHandler {
+	private static final String RangerModuleName =  RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ;
+
+	Collection<AuthzAuditEvent> auditEvents  = null;
+	boolean                     deniedExists = false;
+
+	public RangerHiveAuditHandler() {
+		super();
+	}
+
+	@Override
+	public void logAudit(RangerAccessResult result) {
+		if(! result.getIsAudited()) {
+			return;
+		}
+
+		AuthzAuditEvent auditEvent = new AuthzAuditEvent();
+
+		RangerHiveAccessRequest request  = (RangerHiveAccessRequest)result.getAccessRequest();
+		RangerHiveResource      resource = (RangerHiveResource)request.getResource();
+		boolean                 isAllowed = result.getResult() == Result.ALLOWED;
+
+		auditEvent.setAclEnforcer(RangerModuleName);
+		auditEvent.setSessionId(request.getSessionId());
+		auditEvent.setResourceType("@" + StringUtil.toLower(resource.getObjectType().name())); // to be consistent with earlier release
+		auditEvent.setAccessType(request.getAccessType().toString());
+		auditEvent.setAction(request.getAction());
+		auditEvent.setUser(request.getUser());
+		auditEvent.setAccessResult((short)(isAllowed ? 1 : 0));
+		auditEvent.setPolicyId(result.getPolicyId());
+		auditEvent.setClientIP(request.getClientIPAddress());
+		auditEvent.setClientType(request.getClientType());
+		auditEvent.setEventTime(request.getAccessTime());
+		auditEvent.setRepositoryType(result.getServiceType());
+		auditEvent.setRepositoryName(result.getServiceName()) ;
+		auditEvent.setRequestData(request.getRequestData());
+		auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef()));
+
+		addAuthzAuditEvent(auditEvent);
+	}
+
+	/*
+	 * This method is expected to be called ONLY to process the results for multiple-columns in a table.
+	 * To ensure this, RangerHiveAuthorizer should call isAccessAllowed(Collection<requests>) only for this condition
+	 */
+	@Override
+	public void logAudit(Collection<RangerAccessResult> results) {
+		Map<Long, AuthzAuditEvent> auditEvents = new HashMap<Long, AuthzAuditEvent>();
+
+		for(RangerAccessResult result : results) {
+			if(! result.getIsAudited()) {
+				continue;
+			}
+
+			RangerHiveAccessRequest request    = (RangerHiveAccessRequest)result.getAccessRequest();
+			RangerHiveResource      resource   = (RangerHiveResource)request.getResource();
+			boolean                 isAllowed  = result.getResult() == Result.ALLOWED;
+			AuthzAuditEvent         auditEvent = auditEvents.get(result.getPolicyId());
+
+			if(auditEvent == null) {
+				auditEvent = new AuthzAuditEvent();
+				auditEvents.put(result.getPolicyId(), auditEvent);
+
+				auditEvent.setAclEnforcer(RangerModuleName);
+				auditEvent.setSessionId(request.getSessionId());
+				auditEvent.setResourceType("@" + StringUtil.toLower(resource.getObjectType().name())); // to be consistent with earlier release
+				auditEvent.setAccessType(request.getAccessType().toString());
+				auditEvent.setAction(request.getAction());
+				auditEvent.setUser(request.getUser());
+				auditEvent.setAccessResult((short)(isAllowed ? 1 : 0));
+				auditEvent.setPolicyId(result.getPolicyId());
+				auditEvent.setClientIP(request.getClientIPAddress());
+				auditEvent.setClientType(request.getClientType());
+				auditEvent.setEventTime(request.getAccessTime());
+				auditEvent.setRepositoryType(result.getServiceType());
+				auditEvent.setRepositoryName(result.getServiceName()) ;
+				auditEvent.setRequestData(request.getRequestData());
+				auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef()));
+			} else if(isAllowed){
+				auditEvent.setResourcePath(auditEvent.getResourcePath() + "," + resource.getColumn());
+			} else {
+				auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef()));
+			}
+			
+			if(!isAllowed) {
+				auditEvent.setResourcePath(getResourceValueAsString(resource, result.getServiceDef()));
+
+				break;
+			}
+		}
+
+		for(AuthzAuditEvent auditEvent : auditEvents.values()) {
+			addAuthzAuditEvent(auditEvent);
+		}
+	}
+
+    public void logAuditEventForDfs(String userName, String dfsCommand, boolean accessGranted, int repositoryType, String repositoryName) {
+		AuthzAuditEvent auditEvent = new AuthzAuditEvent();
+
+		auditEvent.setAclEnforcer(RangerModuleName);
+		auditEvent.setResourceType("@dfs"); // to be consistent with earlier release
+		auditEvent.setAccessType("DFS");
+		auditEvent.setAction("DFS");
+		auditEvent.setUser(userName);
+		auditEvent.setAccessResult((short)(accessGranted ? 1 : 0));
+		auditEvent.setEventTime(StringUtil.getUTCDate());
+		auditEvent.setRepositoryType(repositoryType);
+		auditEvent.setRepositoryName(repositoryName) ;
+		auditEvent.setRequestData(dfsCommand);
+
+		auditEvent.setResourcePath(dfsCommand);
+
+		addAuthzAuditEvent(auditEvent);
+    }
+
+    public void flushAudit() {
+    	if(auditEvents == null) {
+    		return;
+    	}
+
+    	for(AuthzAuditEvent auditEvent : auditEvents) {
+    		if(deniedExists && auditEvent.getAccessResult() != 0) { // if deny exists, skip logging for allowed results
+    			continue;
+    		}
+
+    		super.logAuthzAudit(auditEvent);
+    	}
+    }
+
+    private void addAuthzAuditEvent(AuthzAuditEvent auditEvent) {
+    	if(auditEvent != null) {
+    		if(auditEvents == null) {
+    			auditEvents = new ArrayList<AuthzAuditEvent>();
+    		}
+    		
+    		auditEvents.add(auditEvent);
+    		
+    		if(auditEvent.getAccessResult() == 0) {
+    			deniedExists = true;
+    		}
+    	}
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
index 0dcea7c..df19603 100644
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
@@ -20,8 +20,11 @@
  package org.apache.ranger.authorization.hive.authorizer;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
+import java.util.Set;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -45,27 +48,24 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.ranger.admin.client.RangerAdminRESTClient;
 import org.apache.ranger.admin.client.datatype.GrantRevokeData;
-import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.AuthzAuditEvent;
-import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
-import org.apache.ranger.authorization.hive.RangerHiveAccessContext;
-import org.apache.ranger.authorization.hive.RangerHiveAccessVerifier;
-import org.apache.ranger.authorization.hive.RangerHiveAccessVerifierFactory;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveAccessType;
-import org.apache.ranger.authorization.hive.RangerHiveObjectAccessInfo.HiveObjectType;
 import org.apache.ranger.authorization.utils.StringUtil;
+import org.apache.ranger.plugin.policyengine.RangerAccessRequest;
+import org.apache.ranger.plugin.policyengine.RangerAccessResult;
+import org.apache.ranger.plugin.policyengine.RangerAccessResult.Result;
+import org.apache.ranger.plugin.service.RangerBasePlugin;
+
+import com.google.common.collect.Sets;
 
 public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 	private static final Log LOG = LogFactory.getLog(RangerHiveAuthorizer.class) ; 
 
-	private static final String RangerModuleName =  RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ;
-	private static final String repositoryName     = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_REPOSITORY_NAME_PROP);
+	private static final char COLUMN_SEP = ',';
+
 	private static final boolean UpdateXaPoliciesOnGrantRevoke = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_PROP, RangerHadoopConstants.HIVE_UPDATE_RANGER_POLICIES_ON_GRANT_REVOKE_DEFAULT_VALUE);
 
-	private RangerHiveAccessVerifier mHiveAccessVerifier = null ;
+	private static RangerHivePlugin hivePlugin = null ;
 
 
 	public RangerHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
@@ -76,23 +76,32 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 
 		LOG.debug("RangerHiveAuthorizer.RangerHiveAuthorizer()");
 
-		mHiveAccessVerifier = RangerHiveAccessVerifierFactory.getInstance() ;
-		
-		if(!RangerConfiguration.getInstance().isAuditInitDone()) {
-			if(sessionContext != null) {
-				String appType = "unknown";
-
-				switch(sessionContext.getClientType()) {
-					case HIVECLI:
-						appType = "hiveCLI";
-					break;
+		if(hivePlugin == null) {
+			synchronized(RangerHiveAuthorizer.class) {
+				if(hivePlugin == null) {
+					RangerHivePlugin temp = new RangerHivePlugin();
+					temp.init();
+					
+					if(!RangerConfiguration.getInstance().isAuditInitDone()) {
+						if(sessionContext != null) {
+							String appType = "unknown";
+
+							switch(sessionContext.getClientType()) {
+								case HIVECLI:
+									appType = "hiveCLI";
+								break;
+
+								case HIVESERVER2:
+									appType = "hiveServer2";
+								break;
+							}
+
+							RangerConfiguration.getInstance().initAudit(appType);
+						}
+					}
 
-					case HIVESERVER2:
-						appType = "hiveServer2";
-					break;
+					hivePlugin = temp;
 				}
-
-				RangerConfiguration.getInstance().initAudit(appType);
 			}
 		}
 	}
@@ -119,8 +128,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			throw new HiveAuthzPluginException("GRANT/REVOKE not supported in Ranger HiveAuthorizer. Please use Ranger Security Admin to setup access control.");
 		}
 
+		/* TODO:
+		 * 
 		boolean                isSuccess     = false;
-		RangerHiveObjectAccessInfo objAccessInfo = getObjectAccessInfo(HiveOperationType.GRANT_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true);
+		RangerHiveObjectAccessInfo objAccessInfo = getHiveAccessRequests(HiveOperationType.GRANT_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true);
 
 		try {
 			GrantRevokeData grData = createGrantRevokeData(objAccessInfo, hivePrincipals, hivePrivileges, getGrantorUsername(grantorPrincipal), grantOption);
@@ -144,6 +155,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 				logAuditEvent(ugi, objAccessInfo, isSuccess);
 			}
 		}
+		*/
 	}
 
 	/**
@@ -167,8 +179,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			throw new HiveAuthzPluginException("GRANT/REVOKE not supported in Ranger HiveAuthorizer. Please use Ranger Security Admin to setup access control.");
 		}
 
+		/* TODO:
+		 * 
 		boolean                isSuccess     = false;
-		RangerHiveObjectAccessInfo objAccessInfo = getObjectAccessInfo(HiveOperationType.REVOKE_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true);
+		RangerHiveObjectAccessInfo objAccessInfo = getHiveAccessRequests(HiveOperationType.REVOKE_PRIVILEGE, hivePrivObject, new RangerHiveAccessContext(null, getHiveAuthzSessionContext()), true);
 
 		try {
 			GrantRevokeData grData = createGrantRevokeData(objAccessInfo, hivePrincipals, hivePrivileges, getGrantorUsername(grantorPrincipal), grantOption);
@@ -192,6 +206,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 				logAuditEvent(ugi, objAccessInfo, isSuccess);
 			}
 		}
+		*/
 	}
 
 	/**
@@ -209,136 +224,167 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 							    List<HivePrivilegeObject> outputHObjs,
 							    HiveAuthzContext          context)
 		      throws HiveAuthzPluginException, HiveAccessControlException {
-
-		UserGroupInformation ugi =  this.getCurrentUserGroupInfo();
+		UserGroupInformation ugi = getCurrentUserGroupInfo();
 
 		if(ugi == null) {
 			throw new HiveAccessControlException("Permission denied: user information not available");
 		}
 
-		RangerHiveAccessContext hiveContext = this.getAccessContext(context);
+		RangerHiveAuditHandler auditHandler = new RangerHiveAuditHandler();
 
-		if(LOG.isDebugEnabled()) {
-			LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, hiveContext));
-		}
-		
-		if(hiveOpType == HiveOperationType.DFS) {
-			handleDfsCommand(hiveOpType, inputHObjs, outputHObjs, hiveContext);
-			
-			return;
-		}
+		try {
+			HiveAuthzSessionContext sessionContext = getHiveAuthzSessionContext();
+			String                  user           = ugi.getShortUserName();
+			Set<String>             groups         = Sets.newHashSet(ugi.getGroupNames());
 
-		List<RangerHiveObjectAccessInfo> objAccessList = getObjectAccessInfo(hiveOpType, inputHObjs, outputHObjs, hiveContext);
+			if(LOG.isDebugEnabled()) {
+				LOG.debug(toString(hiveOpType, inputHObjs, outputHObjs, context, sessionContext));
+			}
 
-		for(RangerHiveObjectAccessInfo objAccessInfo : objAccessList) {
-            boolean ret = false;
+			if(hiveOpType == HiveOperationType.DFS) {
+				handleDfsCommand(hiveOpType, inputHObjs, outputHObjs, context, sessionContext, user, groups, auditHandler);
 
-            if(objAccessInfo.getObjectType() == HiveObjectType.URI) {
-                ret = isURIAccessAllowed(ugi, objAccessInfo.getAccessType(), objAccessInfo.getUri(), getHiveConf());
-            } else if(objAccessInfo.getAccessType() != HiveAccessType.ADMIN) {
-                ret = mHiveAccessVerifier.isAccessAllowed(ugi, objAccessInfo);
-            }
+				return;
+			}
 
-			if(! ret) {
-				if(mHiveAccessVerifier.isAudited(objAccessInfo)) {
-					logAuditEvent(ugi, objAccessInfo, false);
-				}
-				
-				String deniedObjectName = objAccessInfo.getDeinedObjectName();
-				
-				if(StringUtil.isEmpty(deniedObjectName)) {
-					deniedObjectName = objAccessInfo.getObjectName();
-				}
+			List<RangerHiveAccessRequest> requests = new ArrayList<RangerHiveAccessRequest>();
 
-				throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]",
-													 ugi.getShortUserName(), objAccessInfo.getAccessType().name(), deniedObjectName));
-			}
-		}
+			if(inputHObjs != null) {
+				for(HivePrivilegeObject hiveObj : inputHObjs) {
+					RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
 
-		// access is allowed; audit all accesses
-		for(RangerHiveObjectAccessInfo objAccessInfo : objAccessList) {
-			if(mHiveAccessVerifier.isAudited(objAccessInfo)) {
-				logAuditEvent(ugi, objAccessInfo, true);
-			}
-		}
-	}
-	
-	private List<RangerHiveObjectAccessInfo> getObjectAccessInfo(HiveOperationType       hiveOpType,
-														   List<HivePrivilegeObject> inputsHObjs,
-														   List<HivePrivilegeObject> outputHObjs,
-														   RangerHiveAccessContext       context) {
-		List<RangerHiveObjectAccessInfo> ret = new ArrayList<RangerHiveObjectAccessInfo>();
-
-		if(inputsHObjs != null) {
-			for(HivePrivilegeObject hiveObj : inputsHObjs) {
-				RangerHiveObjectAccessInfo hiveAccessObj = getObjectAccessInfo(hiveOpType, hiveObj, context, true);
-				
-				if(   hiveAccessObj != null
-				   && hiveAccessObj.getAccessType() != HiveAccessType.ADMIN // access check is performed at the Ranger policy server, as a part of updating the permissions
-				   && !ret.contains(hiveAccessObj)) {
-					ret.add(hiveAccessObj);
+					if(resource.getObjectType() == HiveObjectType.URI) {
+						String   path       = hiveObj.getObjectName();
+						FsAction permission = FsAction.READ;
+
+		                if(!isURIAccessAllowed(user, groups, permission, path, getHiveConf())) {
+		    				throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
+		                }
+
+						continue;
+					}
+
+					HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, true);
+
+					// ADMIN: access check is performed at the Ranger policy server, as a part of updating the permissions
+					if(accessType == HiveAccessType.ADMIN || accessType == HiveAccessType.NONE) {
+						continue;
+					}
+
+					if(!existsByResourceAndAccessType(requests, resource, accessType)) {
+						RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext);
+
+						requests.add(request);
+					}
 				}
 			}
-		}
 
-		if(outputHObjs != null) {
-			for(HivePrivilegeObject hiveObj : outputHObjs) {
-				RangerHiveObjectAccessInfo hiveAccessObj = getObjectAccessInfo(hiveOpType, hiveObj, context, false);
-				
-				if(   hiveAccessObj != null
-				   && hiveAccessObj.getAccessType() != HiveAccessType.ADMIN // access check is performed at the Ranger policy server, as a part of updating the permissions
-				   && !ret.contains(hiveAccessObj)) {
-					ret.add(hiveAccessObj);
+			if(outputHObjs != null) {
+				for(HivePrivilegeObject hiveObj : outputHObjs) {
+					RangerHiveResource resource = getHiveResource(hiveOpType, hiveObj);
+
+					if(resource.getObjectType() == HiveObjectType.URI) {
+						String   path       = hiveObj.getObjectName();
+						FsAction permission = FsAction.WRITE;
+
+		                if(!isURIAccessAllowed(user, groups, permission, path, getHiveConf())) {
+		    				throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
+		                }
+
+						continue;
+					}
+
+					HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, false);
+
+					// ADMIN: access check is performed at the Ranger policy server, as a part of updating the permissions
+					if(accessType == HiveAccessType.ADMIN || accessType == HiveAccessType.NONE) {
+						continue;
+					}
+
+					if(!existsByResourceAndAccessType(requests, resource, accessType)) {
+						RangerHiveAccessRequest request = new RangerHiveAccessRequest(resource, user, groups, hiveOpType, accessType, context, sessionContext);
+
+						requests.add(request);
+					}
 				}
 			}
-		}
 
-		if(ret.size() == 0 && LOG.isDebugEnabled()) {
-			LOG.debug("getObjectAccessInfo(): no objects found for access check! " + toString(hiveOpType, inputsHObjs, outputHObjs, context));
+			for(RangerHiveAccessRequest request : requests) {
+	            RangerHiveResource resource = (RangerHiveResource)request.getResource();
+	            RangerAccessResult result   = null;
+
+	            if(resource.getObjectType() == HiveObjectType.COLUMN && StringUtils.contains(resource.getColumn(), COLUMN_SEP)) {
+	            	List<RangerAccessRequest> colRequests = new ArrayList<RangerAccessRequest>();
+
+	            	String[] columns = StringUtils.split(resource.getColumn(), COLUMN_SEP);
+
+	            	for(String column : columns) {
+	            		column = column == null ? null : column.trim();
+
+	            		if(StringUtils.isEmpty(column.trim())) {
+	            			continue;
+	            		}
+
+	            		RangerHiveResource colResource = new RangerHiveResource(HiveObjectType.COLUMN, resource.getDatabase(), resource.getTableOrUdf(), column);
+
+	            		RangerHiveAccessRequest colRequest = request.copy();
+	            		colRequest.setResource(colResource);
+
+	            		colRequests.add(colRequest);
+	            	}
+
+	            	Collection<RangerAccessResult> colResults = hivePlugin.isAccessAllowed(colRequests, auditHandler);
+
+	            	if(colResults != null) {
+		            	for(RangerAccessResult colResult : colResults) {
+		            		result = colResult;
+
+		            		if(result.getResult() != Result.ALLOWED) {
+		            			break;
+		            		}
+		            	}
+	            	}
+	            } else {
+		            result = hivePlugin.isAccessAllowed(request, auditHandler);
+	            }
+
+				if(result != null && result.getResult() != Result.ALLOWED) {
+					String path = auditHandler.getResourceValueAsString(request.getResource(), result.getServiceDef());
+	
+					throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]",
+														 user, request.getAccessType().name(), path));
+				}
+			}
+		} finally {
+			auditHandler.flushAudit();
 		}
-		
-		return ret;
 	}
 
-	private RangerHiveObjectAccessInfo getObjectAccessInfo(HiveOperationType hiveOpType, HivePrivilegeObject hiveObj, RangerHiveAccessContext context, boolean isInput) {
-		RangerHiveObjectAccessInfo ret = null;
+	private RangerHiveResource getHiveResource(HiveOperationType   hiveOpType,
+											   HivePrivilegeObject hiveObj) {
+		RangerHiveResource ret = null;
 
 		HiveObjectType objectType = getObjectType(hiveObj, hiveOpType);
-		HiveAccessType accessType = getAccessType(hiveObj, hiveOpType, isInput);
-		String         operType   = hiveOpType.name();
 
 		switch(objectType) {
 			case DATABASE:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname());
+				ret = new RangerHiveResource(objectType, hiveObj.getDbname());
 			break;
 	
 			case TABLE:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.TABLE, hiveObj.getObjectName());
-			break;
-	
 			case VIEW:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.VIEW, hiveObj.getObjectName());
-			break;
-	
 			case PARTITION:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.PARTITION, hiveObj.getObjectName());
-			break;
-	
 			case INDEX:
-				String indexName = "?"; // TODO:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), hiveObj.getObjectName(), HiveObjectType.INDEX, indexName);
+			case FUNCTION:
+				ret = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName());
 			break;
 	
 			case COLUMN:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), hiveObj.getObjectName(), hiveObj.getColumns());
-			break;
-
-			case FUNCTION:
-				ret = new RangerHiveObjectAccessInfo(operType, context, accessType, hiveObj.getDbname(), HiveObjectType.FUNCTION, hiveObj.getObjectName());
+				ret = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName(), StringUtils.join(hiveObj.getColumns(), COLUMN_SEP));
 			break;
 
             case URI:
-                ret = new RangerHiveObjectAccessInfo(operType, context, accessType, HiveObjectType.URI, hiveObj.getObjectName());
+				ret = new RangerHiveResource(objectType, hiveObj.getObjectName());
             break;
 
 			case NONE:
@@ -555,32 +601,9 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		return accessType;
 	}
 
-    private boolean isURIAccessAllowed(UserGroupInformation ugi, HiveAccessType accessType, String uri, HiveConf conf) {
+    private boolean isURIAccessAllowed(String userName, Set<String> groups, FsAction action, String uri, HiveConf conf) {
         boolean ret = false;
 
-        FsAction action = FsAction.NONE;
-
-        switch(accessType) {
-            case ALTER:
-            case CREATE:
-            case UPDATE:
-            case DROP:
-            case INDEX:
-            case LOCK:
-            case ADMIN:
-    		case ALL:
-                action = FsAction.WRITE;
-            break;
-
-            case SELECT:
-            case USE:
-                action = FsAction.READ;
-            break;
-
-            case NONE:
-            break;
-        }
-
         if(action == FsAction.NONE) {
             ret = true;
         } else {
@@ -589,7 +612,6 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
                 FileSystem fs         = FileSystem.get(filePath.toUri(), conf);
                 Path       path       = FileUtils.getPathOrParentThatExists(fs, filePath);
                 FileStatus fileStatus = fs.getFileStatus(path);
-                String     userName   = ugi.getShortUserName();
 
                 if (FileUtils.isOwnerOfFileHierarchy(fs, fileStatus, userName)) {
                     ret = true;
@@ -607,7 +629,11 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 	private void handleDfsCommand(HiveOperationType         hiveOpType,
 								  List<HivePrivilegeObject> inputHObjs,
 							      List<HivePrivilegeObject> outputHObjs,
-							      RangerHiveAccessContext       context)
+							      HiveAuthzContext          context,
+							      HiveAuthzSessionContext   sessionContext,
+								  String                    user,
+								  Set<String>               groups,
+								  RangerHiveAuditHandler    auditHandler)
 	      throws HiveAuthzPluginException, HiveAccessControlException {
 
 		String dfsCommandParams = null;
@@ -624,14 +650,42 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			}
 		}
 
-		UserGroupInformation ugi = this.getCurrentUserGroupInfo();
+		int    serviceType = -1;
+		String serviceName = null;
 
-		logAuditEventForDfs(ugi, dfsCommandParams, false);
+		if(hivePlugin != null) {
+			if(hivePlugin.getPolicyEngine() != null &&
+			   hivePlugin.getPolicyEngine().getServiceDef() != null &&
+			   hivePlugin.getPolicyEngine().getServiceDef().getId() != null ) {
+				serviceType = hivePlugin.getPolicyEngine().getServiceDef().getId().intValue();
+			}
+
+			serviceName = hivePlugin.getServiceName();
+		}
+
+		auditHandler.logAuditEventForDfs(user, dfsCommandParams, false, serviceType, serviceName);
 
 		throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have privilege for [%s] command",
-											 ugi.getShortUserName(), hiveOpType.name()));
+											 user, hiveOpType.name()));
 	}
-	
+
+	private boolean existsByResourceAndAccessType(Collection<RangerHiveAccessRequest> requests, RangerHiveResource resource, HiveAccessType accessType) {
+		boolean ret = false;
+
+		if(requests != null && resource != null) {
+			for(RangerHiveAccessRequest request : requests) {
+				if(request.getAccessType() == accessType && request.getResource().equals(resource)) {
+					ret = true;
+
+					break;
+				}
+			}
+		}
+
+		return ret;
+	}
+
+	/*
 	private String getGrantorUsername(HivePrincipal grantorPrincipal) {
 		String grantor = grantorPrincipal != null ? grantorPrincipal.getName() : null;
 
@@ -714,82 +768,13 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 
 		return grData;
 	}
-
-    private void logAuditEventForDfs(UserGroupInformation ugi, String dfsCommand, boolean accessGranted) {
-		AuthzAuditEvent auditEvent = new AuthzAuditEvent();
-
-		try {
-			auditEvent.setAclEnforcer(RangerModuleName);
-			auditEvent.setResourceType("@dfs"); // to be consistent with earlier release
-			auditEvent.setAccessType("DFS");
-			auditEvent.setAction("DFS");
-			auditEvent.setUser(ugi.getShortUserName());
-			auditEvent.setAccessResult((short)(accessGranted ? 1 : 0));
-			auditEvent.setEventTime(StringUtil.getUTCDate());
-			auditEvent.setRepositoryType(EnumRepositoryType.HIVE);
-			auditEvent.setRepositoryName(repositoryName) ;
-			auditEvent.setRequestData(dfsCommand);
-
-			auditEvent.setResourcePath(dfsCommand);
-		
-			if(LOG.isDebugEnabled()) {
-				LOG.debug("logAuditEvent [" + auditEvent + "] - START");
-			}
-
-			AuditProviderFactory.getAuditProvider().log(auditEvent);
-
-			if(LOG.isDebugEnabled()) {
-				LOG.debug("logAuditEvent [" + auditEvent + "] - END");
-			}
-		}
-		catch(Throwable t) {
-			LOG.error("ERROR logEvent [" + auditEvent + "]", t);
-		}
-    }
-
-	private void logAuditEvent(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo, boolean accessGranted) {
-		AuthzAuditEvent auditEvent = new AuthzAuditEvent();
-
-		try {
-			auditEvent.setAclEnforcer(RangerModuleName);
-			auditEvent.setSessionId(objAccessInfo.getContext().getSessionString());
-			auditEvent.setResourceType("@" + StringUtil.toLower(objAccessInfo.getObjectType().name())); // to be consistent with earlier release
-			auditEvent.setAccessType(objAccessInfo.getAccessType().toString());
-			auditEvent.setAction(objAccessInfo.getOperType());
-			auditEvent.setUser(ugi.getShortUserName());
-			auditEvent.setAccessResult((short)(accessGranted ? 1 : 0));
-			auditEvent.setClientIP(objAccessInfo.getContext().getClientIpAddress());
-			auditEvent.setClientType(objAccessInfo.getContext().getClientType());
-			auditEvent.setEventTime(StringUtil.getUTCDate());
-			auditEvent.setRepositoryType(EnumRepositoryType.HIVE);
-			auditEvent.setRepositoryName(repositoryName) ;
-			auditEvent.setRequestData(objAccessInfo.getContext().getCommandString());
-
-			if(! accessGranted && !StringUtil.isEmpty(objAccessInfo.getDeinedObjectName())) {
-				auditEvent.setResourcePath(objAccessInfo.getDeinedObjectName());
-			} else {
-				auditEvent.setResourcePath(objAccessInfo.getObjectName());
-			}
-		
-			if(LOG.isDebugEnabled()) {
-				LOG.debug("logAuditEvent [" + auditEvent + "] - START");
-			}
-
-			AuditProviderFactory.getAuditProvider().log(auditEvent);
-
-			if(LOG.isDebugEnabled()) {
-				LOG.debug("logAuditEvent [" + auditEvent + "] - END");
-			}
-		}
-		catch(Throwable t) {
-			LOG.error("ERROR logEvent [" + auditEvent + "]", t);
-		}
-	}
+	*/
 	
 	private String toString(HiveOperationType         hiveOpType,
 							List<HivePrivilegeObject> inputHObjs,
 							List<HivePrivilegeObject> outputHObjs,
-							RangerHiveAccessContext       context) {
+							HiveAuthzContext          context,
+							HiveAuthzSessionContext   sessionContext) {
 		StringBuilder sb = new StringBuilder();
 		
 		sb.append("'checkPrivileges':{");
@@ -804,12 +789,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		sb.append("]");
 
 		sb.append(", 'context':{");
-		if(context != null) {
-			sb.append("'clientType':").append(context.getClientType());
-			sb.append(", 'commandString':").append(context.getCommandString());
-			sb.append(", 'ipAddress':").append(context.getClientIpAddress());
-			sb.append(", 'sessionString':").append(context.getSessionString());
-		}
+		sb.append("'clientType':").append(sessionContext == null ? null : sessionContext.getClientType());
+		sb.append(", 'commandString':").append(context == null ? null : context.getCommandString());
+		sb.append(", 'ipAddress':").append(context == null ? null : context.getIpAddress());
+		sb.append(", 'sessionString':").append(sessionContext == null ? null : sessionContext.getSessionString());
 		sb.append("}");
 
 		sb.append(", 'user':").append(this.getCurrentUserGroupInfo().getUserName());
@@ -847,3 +830,14 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		return sb;
 	}
 }
+
+enum HiveObjectType { NONE, DATABASE, TABLE, VIEW, PARTITION, INDEX, COLUMN, FUNCTION, URI };
+enum HiveAccessType { NONE, CREATE, ALTER, DROP, INDEX, LOCK, SELECT, UPDATE, USE, ALL, ADMIN };
+
+class RangerHivePlugin extends RangerBasePlugin {
+	public RangerHivePlugin() {
+		super("hive");
+	}
+}
+
+


[4/6] incubator-ranger git commit: RANGER-203: replaced Hive plugin implementation to use Pluggable-service model.

Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizerBase.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizerBase.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizerBase.java
index de43975..b584f8c 100644
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizerBase.java
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizerBase.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.DisallowTransformHook;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext.CLIENT_TYPE;
@@ -40,7 +39,6 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.SettableConfigUpdater;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.authorization.hive.RangerHiveAccessContext;
 import org.apache.ranger.authorization.utils.StringUtil;
 
 public abstract class RangerHiveAuthorizerBase implements HiveAuthorizer {
@@ -94,10 +92,6 @@ public abstract class RangerHiveAuthorizerBase implements HiveAuthorizer {
 	public UserGroupInformation getCurrentUserGroupInfo() {
 		return mUgi;
 	}
-	
-	public RangerHiveAccessContext getAccessContext(HiveAuthzContext context) {
-		return new RangerHiveAccessContext(context, mSessionContext);
-	}
 
 	@Override
 	public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResource.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResource.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResource.java
new file mode 100644
index 0000000..82e256e
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResource.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.authorization.hive.authorizer;
+
+import java.util.Set;
+
+import org.apache.commons.lang.ObjectUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.ranger.plugin.policyengine.RangerResource;
+
+import com.google.common.collect.Sets;
+
+
+public class RangerHiveResource implements RangerResource {
+	private static final String KEY_DATABASE = "database";
+	private static final String KEY_TABLE    = "table";
+	private static final String KEY_UDF      = "udf";
+	private static final String KEY_COLUMN   = "column";
+
+	private static final Set<String> KEYS_DATABASE = Sets.newHashSet(KEY_DATABASE);
+	private static final Set<String> KEYS_TABLE    = Sets.newHashSet(KEY_DATABASE, KEY_TABLE);
+	private static final Set<String> KEYS_UDF      = Sets.newHashSet(KEY_DATABASE, KEY_UDF);
+	private static final Set<String> KEYS_COLUMN   = Sets.newHashSet(KEY_DATABASE, KEY_TABLE, KEY_COLUMN);
+
+	private HiveObjectType objectType = null;
+	private String         database   = null;
+	private String         tableOrUdf = null;
+	private String         column     = null;
+	private Set<String>    keys       = null;
+
+
+	public RangerHiveResource(HiveObjectType objectType, String database) {
+		this(objectType, database, null, null);
+	}
+
+	public RangerHiveResource(HiveObjectType objectType, String database, String tableOrUdf) {
+		this(objectType, database, tableOrUdf, null);
+	}
+	
+	public RangerHiveResource(HiveObjectType objectType, String database, String tableOrUdf, String column) {
+		this.objectType = objectType;
+		this.database   = database;
+		this.tableOrUdf = tableOrUdf;
+		this.column     = column;
+
+		switch(objectType) {
+			case DATABASE:
+				keys = KEYS_DATABASE;
+			break;
+	
+			case FUNCTION:
+				keys = KEYS_UDF;
+			break;
+
+			case COLUMN:
+				keys = KEYS_COLUMN;
+			break;
+
+			case TABLE:
+			case VIEW:
+			case INDEX:
+			case PARTITION:
+				keys = KEYS_TABLE;
+			break;
+
+			case NONE:
+			case URI:
+			default:
+				keys = null;
+			break;
+		}
+	}
+
+	@Override
+	public String getOwnerUser() {
+		return null; // no owner information available
+	}
+
+	@Override
+	public boolean exists(String name) {
+		return !StringUtils.isEmpty(getValue(name));
+	}
+
+	@Override
+	public String getValue(String name) {
+		if(StringUtils.equalsIgnoreCase(name, KEY_DATABASE)) {
+			return database;
+		} else if(objectType == HiveObjectType.FUNCTION) {
+			if(StringUtils.equalsIgnoreCase(name, KEY_UDF)) {
+				return tableOrUdf;
+			}
+		} else if(StringUtils.equalsIgnoreCase(name, KEY_TABLE)) {
+			return tableOrUdf;
+		} else  if(StringUtils.equalsIgnoreCase(name, KEY_COLUMN)) {
+			return column;
+		}
+
+		return null;
+	}
+
+	public Set<String> getKeys() {
+		return keys;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if(obj == null || !(obj instanceof RangerHiveResource)) {
+			return false;
+		}
+
+		if(this == obj) {
+			return true;
+		}
+
+		RangerHiveResource other = (RangerHiveResource) obj;
+
+		return ObjectUtils.equals(objectType, other.objectType) &&
+			   ObjectUtils.equals(database, other.database) &&
+			   ObjectUtils.equals(tableOrUdf, other.tableOrUdf) &&
+			   ObjectUtils.equals(column, other.column);
+	}
+
+	@Override
+	public int hashCode() {
+		int ret = 7;
+
+		ret = 31 * ret + ObjectUtils.hashCode(objectType);
+		ret = 31 * ret + ObjectUtils.hashCode(database);
+		ret = 31 * ret + ObjectUtils.hashCode(tableOrUdf);
+		ret = 31 * ret + ObjectUtils.hashCode(column);
+
+		return ret;
+	}
+
+	@Override
+	public String toString() {
+		StringBuilder sb = new StringBuilder();
+
+		toString(sb);
+
+		return sb.toString();
+	}
+
+	public StringBuilder toString(StringBuilder sb) {
+		sb.append("objectType={").append(objectType).append("} ");
+		sb.append("database={").append(database).append("} ");
+		sb.append("tableOrUdf={").append(tableOrUdf).append("} ");
+		sb.append("column={").append(column).append("} ");
+		
+		return sb;
+	}
+
+	public HiveObjectType getObjectType() {
+		return objectType;
+	}
+
+	public String getDatabase() {
+		return database;
+	}
+
+	public String getTableOrUdf() {
+		return tableOrUdf;
+	}
+
+	public String getColumn() {
+		return column;
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/plugin-common/src/main/java/org/apache/ranger/plugin/audit/RangerDefaultAuditHandler.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/audit/RangerDefaultAuditHandler.java b/plugin-common/src/main/java/org/apache/ranger/plugin/audit/RangerDefaultAuditHandler.java
index 9c6f7cd..afc03b2 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/audit/RangerDefaultAuditHandler.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/audit/RangerDefaultAuditHandler.java
@@ -87,8 +87,6 @@ public class RangerDefaultAuditHandler implements RangerAuditHandler {
 
 		if(request != null && result != null && result.getIsAudited()) {
 			RangerServiceDef serviceDef   = result.getServiceDef();
-			int              serviceType  = (serviceDef != null && serviceDef.getId() != null) ? serviceDef.getId().intValue() : -1;
-			String           serviceName  = result.getServiceName();
 			String           resourceType = getResourceName(request.getResource(), serviceDef);
 			String           resourcePath = getResourceValueAsString(request.getResource(), serviceDef);
 
@@ -99,8 +97,8 @@ public class RangerDefaultAuditHandler implements RangerAuditHandler {
 
 				AuthzAuditEvent event = createAuthzAuditEvent();
 
-				event.setRepositoryName(serviceName);
-				event.setRepositoryType(serviceType);
+				event.setRepositoryName(result.getServiceName());
+				event.setRepositoryType(result.getServiceType());
 				event.setResourceType(resourceType);
 				event.setResourcePath(resourcePath);
 				event.setRequestData(request.getRequestData());
@@ -108,6 +106,7 @@ public class RangerDefaultAuditHandler implements RangerAuditHandler {
 				event.setUser(request.getUser());
 				event.setAccessType(request.getAction());
 				event.setAccessResult((short)(accessResult.isAllowed() ? 1 : 0));
+				event.setPolicyId(result.getPolicyId());
 				event.setAclEnforcer("ranger-acl"); // TODO: review
 				event.setAction(accessType);
 				event.setClientIP(request.getClientIPAddress());

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/7758ed1c/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerAccessResult.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerAccessResult.java b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerAccessResult.java
index 5f07402..b64a441 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerAccessResult.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerAccessResult.java
@@ -20,8 +20,11 @@
 package org.apache.ranger.plugin.policyengine;
 
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
 
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.ranger.plugin.model.RangerServiceDef;
 
@@ -163,6 +166,40 @@ public class RangerAccessResult {
 		return ret;
 	}
 
+	public int getServiceType() {
+		int ret = -1;
+
+		if(serviceDef != null && serviceDef.getId() != null) {
+			ret = serviceDef.getId().intValue();
+		}
+
+		return ret;
+	}
+
+	public long getPolicyId() {
+		long ret = -1;
+
+		if(! MapUtils.isEmpty(accessTypeResults)) {
+			ResultDetail detail = accessTypeResults.values().iterator().next();
+			
+			ret = detail.getPolicyId();
+		}
+
+		return ret;
+	}
+
+	public Set<Long> getPolicyIds() {
+		Set<Long> ret = new HashSet<Long>();
+
+		if(! MapUtils.isEmpty(accessTypeResults)) {
+			for(ResultDetail detail : accessTypeResults.values()) {
+				ret.add(detail.getPolicyId());
+			}
+		}
+
+		return ret;
+	}
+
 	@Override
 	public String toString( ) {
 		StringBuilder sb = new StringBuilder();


[2/6] incubator-ranger git commit: RANGER-203: Resource to policy match updated to use all the keys in a resource (ex: database, table/udf, [column]).

Posted by ma...@apache.org.
RANGER-203: Resource to policy match updated to use all the keys in
a resource (ex: database, table/udf, [column]).

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/57ded063
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/57ded063
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/57ded063

Branch: refs/heads/stack
Commit: 57ded063dee603767d06af2e9d6bcd442af564a2
Parents: ce1808a
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Mon Jan 26 16:07:31 2015 -0800
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Mon Jan 26 16:07:31 2015 -0800

----------------------------------------------------------------------
 .../audit/provider/MultiDestAuditProvider.java  |  2 +-
 .../plugin/policyengine/RangerResource.java     |  4 +++
 .../plugin/policyengine/RangerResourceImpl.java | 12 ++++++++
 .../RangerDefaultPolicyEvaluator.java           | 31 ++++++++++++--------
 4 files changed, 36 insertions(+), 13 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/57ded063/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
----------------------------------------------------------------------
diff --git a/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java b/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
index 0f429ea..1eec345 100644
--- a/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
+++ b/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
@@ -51,7 +51,7 @@ public class MultiDestAuditProvider extends BaseAuditProvider {
     		try {
                 provider.init(props);
     		} catch(Throwable excp) {
-    			LOG.info("MultiDestAuditProvider.init(): failed" + provider.getClass().getCanonicalName() + ")");
+    			LOG.info("MultiDestAuditProvider.init(): failed " + provider.getClass().getCanonicalName() + ")", excp);
     		}
         }
 	}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/57ded063/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResource.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResource.java b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResource.java
index f79aba8..6941bc3 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResource.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResource.java
@@ -19,6 +19,8 @@
 
 package org.apache.ranger.plugin.policyengine;
 
+import java.util.Set;
+
 
 public interface RangerResource {
 	public abstract String getOwnerUser();
@@ -26,4 +28,6 @@ public interface RangerResource {
 	public abstract boolean exists(String name);
 
 	public abstract String getValue(String name);
+
+	public Set<String> getKeys();
 }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/57ded063/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResourceImpl.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResourceImpl.java b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResourceImpl.java
index 529ac5f..86f7ea4 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResourceImpl.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/policyengine/RangerResourceImpl.java
@@ -21,6 +21,7 @@ package org.apache.ranger.plugin.policyengine;
 
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;
 
 
 public class RangerResourceImpl implements RangerMutableResource {
@@ -53,6 +54,17 @@ public class RangerResourceImpl implements RangerMutableResource {
 	}
 
 	@Override
+	public Set<String> getKeys() {
+		Set<String> ret = null;
+
+		if(elements != null) {
+			ret = elements.keySet();
+		}
+
+		return ret;
+	}
+
+	@Override
 	public void setOwnerUser(String ownerUser) {
 		this.ownerUser = ownerUser;
 	}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/57ded063/plugin-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java b/plugin-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
index 0160347..7fea4b6 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
@@ -178,20 +178,27 @@ public class RangerDefaultPolicyEvaluator extends RangerAbstractPolicyEvaluator
 		RangerServiceDef serviceDef = getServiceDef();
 
 		if(serviceDef != null && serviceDef.getResources() != null) {
-			for(RangerResourceDef resourceDef : serviceDef.getResources()) {
-				String                resourceName  = resourceDef.getName();
-				String                resourceValue = resource == null ? null : resource.getValue(resourceName);
-				RangerResourceMatcher matcher       = matchers == null ? null : matchers.get(resourceName);
+			Collection<String> resourceKeys = resource == null ? null : resource.getKeys();
+			Collection<String> policyKeys   = matchers == null ? null : matchers.keySet();
+			
+			boolean keysMatch = (resourceKeys == null) || (policyKeys != null && policyKeys.containsAll(resourceKeys));
 
-				// when no value exists for a resourceName, consider it a match only if (policy doesn't have a matcher OR matcher allows no-value resource)
-				if(StringUtils.isEmpty(resourceValue)) {
-					ret = matcher == null || matcher.isMatch(resourceValue);
-				} else {
-					ret = matcher != null && matcher.isMatch(resourceValue);
-				}
+			if(keysMatch) {
+				for(RangerResourceDef resourceDef : serviceDef.getResources()) {
+					String                resourceName  = resourceDef.getName();
+					String                resourceValue = resource == null ? null : resource.getValue(resourceName);
+					RangerResourceMatcher matcher       = matchers == null ? null : matchers.get(resourceName);
 
-				if(! ret) {
-					break;
+					// when no value exists for a resourceName, consider it a match only if (policy doesn't have a matcher OR matcher allows no-value resource)
+					if(StringUtils.isEmpty(resourceValue)) {
+						ret = matcher == null || matcher.isMatch(resourceValue);
+					} else {
+						ret = matcher != null && matcher.isMatch(resourceValue);
+					}
+
+					if(! ret) {
+						break;
+					}
 				}
 			}
 		}


[3/6] incubator-ranger git commit: RANGER-203: HDFS plugin updated for recent change in RangerResource.

Posted by ma...@apache.org.
RANGER-203: HDFS plugin updated for recent change in RangerResource.

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/5a50f5fb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/5a50f5fb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/5a50f5fb

Branch: refs/heads/stack
Commit: 5a50f5fb03581fa99577076c60ae9edadc9ef476
Parents: 57ded06
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Mon Jan 26 16:08:54 2015 -0800
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Mon Jan 26 16:08:54 2015 -0800

----------------------------------------------------------------------
 .../namenode/RangerFSPermissionChecker.java     | 50 +++++++++-----------
 1 file changed, 23 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a50f5fb/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
index fcf710c..f4e6dc7 100644
--- a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
+++ b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
@@ -21,16 +21,10 @@ package org.apache.hadoop.hdfs.server.namenode;
 import static org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants.*;
 
 import java.net.InetAddress;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Date;
-import java.util.GregorianCalendar;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
-import java.util.TimeZone;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -42,6 +36,7 @@ import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
 import org.apache.ranger.authorization.hadoop.exceptions.RangerAccessControlException;
+import org.apache.ranger.authorization.utils.StringUtil;
 import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler;
 import org.apache.ranger.plugin.model.RangerServiceDef;
 import org.apache.ranger.plugin.policyengine.RangerAccessRequest;
@@ -50,11 +45,13 @@ import org.apache.ranger.plugin.policyengine.RangerAccessResult;
 import org.apache.ranger.plugin.policyengine.RangerResource;
 import org.apache.ranger.plugin.service.RangerBasePlugin;
 
+import com.google.common.collect.Sets;
+
 
 public class RangerFSPermissionChecker {
 	private static final Log LOG = LogFactory.getLog(RangerFSPermissionChecker.class);
 
-	private static final boolean addHadoopAuth 	  = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_PROP, RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_DEFAULT) ;
+	private static final boolean addHadoopAuth = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_PROP, RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_DEFAULT) ;
 
 
 	private static RangerHdfsPlugin                    rangerPlugin        = null;
@@ -69,7 +66,7 @@ public class RangerFSPermissionChecker {
 		String      path      = inode.getFullPathName();
 		String      pathOwner = inode.getUserName();
 		String      user      = ugi.getShortUserName();
-		Set<String> groups    = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(ugi.getGroupNames())));
+		Set<String> groups    = Sets.newHashSet(ugi.getGroupNames());
 
 		boolean accessGranted =  AuthorizeAccessForUser(path, pathOwner, access, user, groups);
 
@@ -162,6 +159,10 @@ class RangerHdfsPlugin extends RangerBasePlugin {
 }
 
 class RangerHdfsResource implements RangerResource {
+	private static final String KEY_PATH = "path";
+
+	private static final Set<String> KEYS_PATH = Sets.newHashSet(KEY_PATH);
+
 	private String path  = null;
 	private String owner = null;
 
@@ -177,17 +178,21 @@ class RangerHdfsResource implements RangerResource {
 
 	@Override
 	public boolean exists(String name) {
-		return StringUtils.equalsIgnoreCase(name, "path");
+		return StringUtils.equalsIgnoreCase(name, KEY_PATH);
 	}
 
 	@Override
 	public String getValue(String name) {
-		if(StringUtils.equalsIgnoreCase(name, "path")) {
+		if(StringUtils.equalsIgnoreCase(name, KEY_PATH)) {
 			return path;
 		}
 
 		return null;
 	}
+
+	public Set<String> getKeys() {
+		return KEYS_PATH;
+	}
 }
 
 class RangerHdfsAccessRequest extends RangerAccessRequestImpl {
@@ -197,13 +202,13 @@ class RangerHdfsAccessRequest extends RangerAccessRequestImpl {
 		access2ActionListMapper = new HashMap<FsAction, Set<String>>();
 
 		access2ActionListMapper.put(FsAction.NONE,          new HashSet<String>());
-		access2ActionListMapper.put(FsAction.ALL,           new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.READ,          new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.READ_WRITE,    new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.READ_EXECUTE,  new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.WRITE,         new HashSet<String>(Arrays.asList(WRITE_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.WRITE_EXECUTE, new HashSet<String>(Arrays.asList(WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
-		access2ActionListMapper.put(FsAction.EXECUTE,       new HashSet<String>(Arrays.asList(EXECUTE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.ALL,           Sets.newHashSet(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.READ,          Sets.newHashSet(READ_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.READ_WRITE,    Sets.newHashSet(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.READ_EXECUTE,  Sets.newHashSet(READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.WRITE,         Sets.newHashSet(WRITE_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.WRITE_EXECUTE, Sets.newHashSet(WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE));
+		access2ActionListMapper.put(FsAction.EXECUTE,       Sets.newHashSet(EXECUTE_ACCCESS_TYPE));
 	}
 
 	public RangerHdfsAccessRequest(String path, String pathOwner, FsAction access, String user, Set<String> groups) {
@@ -211,19 +216,10 @@ class RangerHdfsAccessRequest extends RangerAccessRequestImpl {
 		super.setAccessTypes(access2ActionListMapper.get(access));
 		super.setUser(user);
 		super.setUserGroups(groups);
-		super.setAccessTime(getUTCDate());
+		super.setAccessTime(StringUtil.getUTCDate());
 		super.setClientIPAddress(getRemoteIp());
 		super.setAction(access.toString());
 	}
-
-	private static Date getUTCDate() {
-		Calendar local=Calendar.getInstance();
-	    int offset = local.getTimeZone().getOffset(local.getTimeInMillis());
-	    GregorianCalendar utc = new GregorianCalendar(TimeZone.getTimeZone("GMT+0"));
-	    utc.setTimeInMillis(local.getTimeInMillis());
-	    utc.add(Calendar.MILLISECOND, -offset);
-	    return utc.getTime();
-	}
 	
 	private static String getRemoteIp() {
 		String ret = null ;