Posted to commits@ranger.apache.org by ab...@apache.org on 2019/12/04 22:55:07 UTC

[ranger] branch master updated: RANGER-2657: Upgrade Hive version from 3.1 to 3.1.2

This is an automated email from the ASF dual-hosted git repository.

abhay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/ranger.git


The following commit(s) were added to refs/heads/master by this push:
     new fa0dff1  RANGER-2657: Upgrade Hive version from 3.1 to 3.1.2
fa0dff1 is described below

commit fa0dff146399b97554d4e745e6e2dcbd7510d988
Author: Abhay Kulkarni <ab...@apache.org>
AuthorDate: Wed Dec 4 14:53:11 2019 -0800

    RANGER-2657: Upgrade Hive version from 3.1 to 3.1.2
---
 .../RangerDefaultPolicyEvaluator.java              |   2 +-
 .../plugin/policyengine/TestPolicyEngine.java      |  65 +++++---
 .../hive/authorizer/RangerHiveAuthorizer.java      | 137 +++++++++++------
 .../hive/authorizer/RangerHivePolicyProvider.java  | 168 +++++++++++++++++++++
 .../hive/authorizer/RangerHiveResourceACLs.java    |  46 ++++++
 pom.xml                                            |   2 +-
 6 files changed, 355 insertions(+), 65 deletions(-)

diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java b/agents-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
index 6664d1b..57ad717 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/policyevaluator/RangerDefaultPolicyEvaluator.java
@@ -1094,7 +1094,7 @@ public class RangerDefaultPolicyEvaluator extends RangerAbstractPolicyEvaluator
 			case RangerPolicy.POLICY_TYPE_ACCESS: {
 				ret = getMatchingPolicyItem(request, denyEvaluators, denyExceptionEvaluators);
 
-				if(ret == null && !result.getIsAllowed()) { // if not denied, evaluate allowItems only if not already allowed
+				if(ret == null && !result.getIsAccessDetermined()) { // a deny policy could have set isAllowed=true, but in such case it wouldn't set isAccessDetermined=true
 					ret = getMatchingPolicyItem(request, allowEvaluators, allowExceptionEvaluators);
 				}
 				break;
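
For context, the evaluator change above swaps the getIsAllowed() check for getIsAccessDetermined(): per the in-line comment, a deny policy can leave isAllowed=true without marking the result as determined, so allow items must still be evaluated in that case. A minimal sketch of the same condition (the helper class and method names below are hypothetical, not part of this commit):

import org.apache.ranger.plugin.policyengine.RangerAccessResult;

// Hypothetical helper, not part of this commit: mirrors the condition used above in
// RangerDefaultPolicyEvaluator. A deny policy may set isAllowed=true without setting
// isAccessDetermined=true, so only a fully determined result skips the allow items.
final class AllowItemCheckSketch {
    static boolean shouldEvaluateAllowItems(RangerAccessResult result) {
        return result != null && !result.getIsAccessDetermined();
    }
}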
diff --git a/agents-common/src/test/java/org/apache/ranger/plugin/policyengine/TestPolicyEngine.java b/agents-common/src/test/java/org/apache/ranger/plugin/policyengine/TestPolicyEngine.java
index 362f28e..4265b06 100644
--- a/agents-common/src/test/java/org/apache/ranger/plugin/policyengine/TestPolicyEngine.java
+++ b/agents-common/src/test/java/org/apache/ranger/plugin/policyengine/TestPolicyEngine.java
@@ -484,35 +484,32 @@ public class TestPolicyEngine {
 
         RangerPolicyEngineOptions policyEngineOptions = pluginContext.getConfig().getPolicyEngineOptions();
 
-        policyEngineOptions.disableTagPolicyEvaluation = false;
-        policyEngineOptions.disableAccessEvaluationWithPolicyACLSummary = false;
-        policyEngineOptions.optimizeTrieForRetrieval = false;
+        policyEngineOptions.disableAccessEvaluationWithPolicyACLSummary = true;
 
-		RangerPolicyEngineImpl policyEngine = new RangerPolicyEngineImpl(servicePolicies, pluginContext, roles);
+        RangerPolicyEngineImpl policyEngine = new RangerPolicyEngineImpl(servicePolicies, pluginContext, roles);
 
-		policyEngine.setUseForwardedIPAddress(useForwardedIPAddress);
-		policyEngine.setTrustedProxyAddresses(trustedProxyAddresses);
+        policyEngine.setUseForwardedIPAddress(useForwardedIPAddress);
+        policyEngine.setTrustedProxyAddresses(trustedProxyAddresses);
 
-		policyEngineOptions.disableAccessEvaluationWithPolicyACLSummary = true;
-		policyEngineOptions.optimizeTrieForRetrieval = false;
+        policyEngineOptions.disableAccessEvaluationWithPolicyACLSummary = false;
 
-		RangerPolicyEngineImpl policyEngineForResourceAccessInfo = new RangerPolicyEngineImpl(servicePolicies, pluginContext, roles);
+		RangerPolicyEngineImpl policyEngineForEvaluatingWithACLs = new RangerPolicyEngineImpl(servicePolicies, pluginContext, roles);
 
-		policyEngineForResourceAccessInfo.setUseForwardedIPAddress(useForwardedIPAddress);
-		policyEngineForResourceAccessInfo.setTrustedProxyAddresses(trustedProxyAddresses);
+		policyEngineForEvaluatingWithACLs.setUseForwardedIPAddress(useForwardedIPAddress);
+		policyEngineForEvaluatingWithACLs.setTrustedProxyAddresses(trustedProxyAddresses);
 
-		runTestCaseTests(policyEngine, policyEngineForResourceAccessInfo, testCase.serviceDef, testName, testCase.tests);
+		runTestCaseTests(policyEngine, policyEngineForEvaluatingWithACLs, testCase.serviceDef, testName, testCase.tests);
 
 		if (testCase.updatedPolicies != null) {
 			servicePolicies.setPolicyDeltas(testCase.updatedPolicies.policyDeltas);
 			servicePolicies.setSecurityZones(testCase.updatedPolicies.securityZones);
 			RangerPolicyEngine updatedPolicyEngine = RangerPolicyEngineImpl.getPolicyEngine(policyEngine, servicePolicies);
-			RangerPolicyEngine updatedPolicyEngineForResourceAccessInfo = RangerPolicyEngineImpl.getPolicyEngine(policyEngineForResourceAccessInfo, servicePolicies);
-			runTestCaseTests(updatedPolicyEngine, updatedPolicyEngineForResourceAccessInfo, testCase.serviceDef, testName, testCase.updatedTests);
+            RangerPolicyEngine updatedPolicyEngineForEvaluatingWithACLs = RangerPolicyEngineImpl.getPolicyEngine(policyEngineForEvaluatingWithACLs, servicePolicies);
+			runTestCaseTests(updatedPolicyEngine, updatedPolicyEngineForEvaluatingWithACLs, testCase.serviceDef, testName, testCase.updatedTests);
 		}
 	}
 
-    private void runTestCaseTests(RangerPolicyEngine policyEngine, RangerPolicyEngine policyEngineForResourceAccessInfo, RangerServiceDef serviceDef, String testName, List<TestData> tests) {
+    private void runTestCaseTests(RangerPolicyEngine policyEngine, RangerPolicyEngine policyEngineForEvaluatingWithACLs, RangerServiceDef serviceDef, String testName, List<TestData> tests) {
 
         RangerAccessRequest request = null;
 
@@ -588,38 +585,66 @@ public class TestPolicyEngine {
 			RangerAccessResultProcessor auditHandler = new RangerDefaultAuditHandler();
 
 			if(test.result != null) {
-				RangerAccessResult expected = test.result;
-				RangerAccessResult result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler);
+                RangerAccessResult expected = test.result;
+                RangerAccessResult result;
+
+				result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler);
 
 				assertNotNull("result was null! - " + test.name, result);
 				assertEquals("isAllowed mismatched! - " + test.name, expected.getIsAllowed(), result.getIsAllowed());
 				assertEquals("isAudited mismatched! - " + test.name, expected.getIsAudited(), result.getIsAudited());
+
+				result   = policyEngineForEvaluatingWithACLs.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ACCESS, auditHandler);
+
+                assertNotNull("result was null! - " + test.name, result);
+                assertEquals("isAllowed mismatched! - " + test.name, expected.getIsAllowed(), result.getIsAllowed());
+                assertEquals("isAudited mismatched! - " + test.name, expected.getIsAudited(), result.getIsAudited());
 			}
 
 			if(test.dataMaskResult != null) {
 				RangerAccessResult expected = test.dataMaskResult;
-				RangerAccessResult result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_DATAMASK, auditHandler);
+				RangerAccessResult result;
+
+                result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_DATAMASK, auditHandler);
+
+                assertNotNull("result was null! - " + test.name, result);
+                assertEquals("maskType mismatched! - " + test.name, expected.getMaskType(), result.getMaskType());
+                assertEquals("maskCondition mismatched! - " + test.name, expected.getMaskCondition(), result.getMaskCondition());
+                assertEquals("maskedValue mismatched! - " + test.name, expected.getMaskedValue(), result.getMaskedValue());
+                assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId());
+
+                result = policyEngineForEvaluatingWithACLs.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_DATAMASK, auditHandler);
 
 				assertNotNull("result was null! - " + test.name, result);
 				assertEquals("maskType mismatched! - " + test.name, expected.getMaskType(), result.getMaskType());
 				assertEquals("maskCondition mismatched! - " + test.name, expected.getMaskCondition(), result.getMaskCondition());
 				assertEquals("maskedValue mismatched! - " + test.name, expected.getMaskedValue(), result.getMaskedValue());
 				assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId());
+
 			}
 
 			if(test.rowFilterResult != null) {
 				RangerAccessResult expected = test.rowFilterResult;
-				RangerAccessResult result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ROWFILTER, auditHandler);
+				RangerAccessResult result;
+
+                result   = policyEngine.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ROWFILTER, auditHandler);
+
+                assertNotNull("result was null! - " + test.name, result);
+                assertEquals("filterExpr mismatched! - " + test.name, expected.getFilterExpr(), result.getFilterExpr());
+                assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId());
+
+				result = policyEngineForEvaluatingWithACLs.evaluatePolicies(request, RangerPolicy.POLICY_TYPE_ROWFILTER, auditHandler);
 
 				assertNotNull("result was null! - " + test.name, result);
 				assertEquals("filterExpr mismatched! - " + test.name, expected.getFilterExpr(), result.getFilterExpr());
 				assertEquals("policyId mismatched! - " + test.name, expected.getPolicyId(), result.getPolicyId());
+
 			}
 
 			if(test.resourceAccessInfo != null) {
 
 				RangerResourceAccessInfo expected = new RangerResourceAccessInfo(test.resourceAccessInfo);
-				RangerResourceAccessInfo result   = policyEngineForResourceAccessInfo.getResourceAccessInfo(test.request);
+				RangerResourceAccessInfo result   = policyEngine.getResourceAccessInfo(test.request);
 
 				assertNotNull("result was null! - " + test.name, result);
 				assertEquals("allowedUsers mismatched! - " + test.name, expected.getAllowedUsers(), result.getAllowedUsers());
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
index 5a7de43..5e2ddae 100644
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
@@ -52,6 +52,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginEx
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
@@ -134,6 +135,17 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 							case HIVESERVER2:
 								appType = "hiveServer2";
 							break;
+
+							/*
+							case HIVEMETASTORE:
+								appType = "hiveMetastore";
+								break;
+
+							case OTHER:
+								appType = "other";
+								break;
+
+							 */
 						}
 					}
 
@@ -147,6 +159,16 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 	}
 
 	@Override
+	public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+		if (hivePlugin == null) {
+			throw new HiveAuthzPluginException();
+		}
+		RangerHivePolicyProvider policyProvider = new RangerHivePolicyProvider(hivePlugin);
+
+		return policyProvider;
+	}
+
+	@Override
 	public void createRole(String roleName, HivePrincipal adminGrantor)
 			throws HiveAuthzPluginException, HiveAccessControlException {
 		if(LOG.isDebugEnabled()) {
@@ -888,7 +910,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 					LOG.debug(String.format(format, actionType, objectType, objectName, dbName, columns, partitionKeys, commandString, ipAddress));
 				}
 				
-				RangerHiveResource resource = createHiveResource(privilegeObject);
+				RangerHiveResource resource = createHiveResourceForFiltering(privilegeObject);
 				if (resource == null) {
 					LOG.error("filterListCmdObjects: RangerHiveResource returned by createHiveResource is null");
 				} else {
@@ -1150,7 +1172,24 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		return ret;
 	}
 
-	private RangerHiveResource createHiveResource(HivePrivilegeObject privilegeObject) {
+	static RangerHiveResource createHiveResourceForFiltering(HivePrivilegeObject privilegeObject) {
+		RangerHiveResource resource = null;
+
+		HivePrivilegeObjectType objectType = privilegeObject.getType();
+
+		switch(objectType) {
+			case DATABASE:
+			case TABLE_OR_VIEW:
+				resource = createHiveResource(privilegeObject);
+				break;
+			default:
+				LOG.warn("RangerHiveAuthorizer.getHiveResourceForFiltering: unexpected objectType:" + objectType);
+		}
+
+		return resource;
+	}
+
+	static RangerHiveResource createHiveResource(HivePrivilegeObject privilegeObject) {
 		RangerHiveResource resource = null;
 
 		HivePrivilegeObjectType objectType = privilegeObject.getType();
@@ -1158,18 +1197,25 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		String dbName = privilegeObject.getDbname();
 
 		switch(objectType) {
-		case DATABASE:
-			resource = new RangerHiveResource(HiveObjectType.DATABASE, dbName);
-			//when fix is in place for HIVE-22128 we can un comment this.
-			//resource.setOwnerUser(privilegeObject.getOwnerName());
-			break;
-		case TABLE_OR_VIEW:
-			resource = new RangerHiveResource(HiveObjectType.TABLE, dbName, objectName);
-			//when fix is in place for HIVE-22128 we can un comment this.
-			//resource.setOwnerUser(privilegeObject.getOwnerName());
-			break;
-		default:
-			LOG.warn("RangerHiveAuthorizer.getHiveResource: unexpected objectType:" + objectType);
+			case DATABASE:
+				resource = new RangerHiveResource(HiveObjectType.DATABASE, dbName);
+				break;
+			case TABLE_OR_VIEW:
+				resource = new RangerHiveResource(HiveObjectType.TABLE, dbName, objectName);
+				//resource.setOwnerUser(privilegeObject.getOwnerName());
+				break;
+			case COLUMN:
+				List<String> columns = privilegeObject.getColumns();
+				int numOfColumns = columns == null ? 0 : columns.size();
+				if (numOfColumns == 1) {
+					resource = new RangerHiveResource(HiveObjectType.COLUMN, dbName, objectName, columns.get(0));
+					//resource.setOwnerUser(privilegeObject.getOwnerName());
+				} else {
+					LOG.warn("RangerHiveAuthorizer.getHiveResource: unexpected number of columns requested:" + numOfColumns + ", objectType:" + objectType);
+				}
+				break;
+			default:
+				LOG.warn("RangerHiveAuthorizer.getHiveResource: unexpected objectType:" + objectType);
 		}
 
 		if (resource != null) {
@@ -1184,10 +1230,6 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 											   HivePrivilegeObject hiveObj,
 											   List<HivePrivilegeObject> inputs,
 											   List<HivePrivilegeObject> outputs) {
-		if(LOG.isDebugEnabled()) {
-			LOG.debug("==> RangerHiveAuthorizer.getHiveResource(" + "HiveOperationType: " + hiveOpType + "HivePrivilegeObject:"+ hiveObj +  " InputObjs:" + inputs + " OutputObjs:" + outputs);
-		}
-
 		RangerHiveResource ret = null;
 
 		HiveObjectType objectType = getObjectType(hiveObj, hiveOpType);
@@ -1195,17 +1237,26 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		switch(objectType) {
 			case DATABASE:
 				ret = new RangerHiveResource(objectType, hiveObj.getDbname());
-				/*if (!isCreateOperation(hiveOpType)) {
+				/*
+				if (!isCreateOperation(hiveOpType)) {
 					ret.setOwnerUser(hiveObj.getOwnerName());
-				}*/
+				}
+
+				 */
 			break;
 	
 			case TABLE:
 			case VIEW:
 			case FUNCTION:
 				ret = new RangerHiveResource(objectType, hiveObj.getDbname(), hiveObj.getObjectName());
+				// To suppress PMD violations
+				if (LOG.isDebugEnabled()) {
+					LOG.debug("Size of inputs = [" + (CollectionUtils.isNotEmpty(inputs) ? inputs.size() : 0) +
+							", Size of outputs = [" + (CollectionUtils.isNotEmpty(outputs) ? outputs.size() : 0) + "]");
+				}
 
-				/*String ownerName = hiveObj.getOwnerName();
+				/*
+				String ownerName = hiveObj.getOwnerName();
 
 				if (isCreateOperation(hiveOpType)) {
 					HivePrivilegeObject dbObject = getDatabaseObject(hiveObj.getDbname(), inputs, outputs);
@@ -1215,7 +1266,9 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 				}
 
 				ret.setOwnerUser(ownerName);
-				*/
+
+				 */
+
 			break;
 
 			case PARTITION:
@@ -1245,10 +1298,6 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			ret.setServiceDef(hivePlugin == null ? null : hivePlugin.getServiceDef());
 		}
 
-		if(LOG.isDebugEnabled()) {
-			LOG.debug("<= RangerHiveAuthorizer.getHiveResource(" + " RangerHiveResource: " + ret);
-		}
-
 		return ret;
 	}
 
@@ -1290,7 +1339,8 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 
 		return ret;
 	}
-	*/
+	
+	 */
 
 	private HiveObjectType getObjectType(HivePrivilegeObject hiveObj, HiveOperationType hiveOpType) {
 		HiveObjectType objType = HiveObjectType.NONE;
@@ -1650,6 +1700,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			case ALTERPARTITION_LOCATION:
 			case ALTERPARTITION_MERGEFILES:
 			case ALTERTBLPART_SKEWED_LOCATION:
+			case ALTERTABLE_OWNER:
 			case QUERY:
 				ret = FsAction.ALL;
 				break;
@@ -2126,6 +2177,17 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		return ret;
 	}
 
+	private boolean isTempUDFOperation(String hiveOpTypeName, HivePrivilegeObject hiveObj) {
+		boolean ret = false;
+		if ((hiveOpTypeName.contains("createfunction") || hiveOpTypeName.contains("dropfunction")) &&
+				StringUtils.isEmpty(hiveObj.getDbname())) {
+			// This happens for temp udf function and will use
+			// global resource policy in ranger for auth
+			ret = true;
+		}
+		return ret;
+	}
+
 	private List<HivePrivilegeInfo> getHivePrivilegeInfos(HivePrincipal principal, HivePrivilegeObject privObj) throws HiveAuthzPluginException {
 		List<HivePrivilegeInfo> ret = new ArrayList<>();
 		HivePrivilegeObject.HivePrivilegeObjectType objectType = null;
@@ -2133,10 +2195,10 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> groupPermissions = null;
 		Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> rolePermissions = null;
 
-		String			dbName     = null;
-		String			objectName = null;
-		String			columnName = null;
-		List<String>	partValues = null;
+		String 		 		dbName	= null;
+		String		 	 objectName = null;
+		String		 	 columnName	= null;
+		List<String> 	 partValues = null;
 
 		try {
 			HiveObjectRef msObjRef = AuthorizationUtils.getThriftHiveObjectRef(privObj);
@@ -2333,7 +2395,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 			LOG.debug("==> RangerHivePolicyProvider.getRangerResourceACLs:[" + hiveObject + "]");
 		}
 
-		RangerHiveResource hiveResource = createHiveResource(hiveObject);
+		RangerHiveResource hiveResource = RangerHiveAuthorizer.createHiveResource(hiveObject);
 		RangerAccessRequestImpl request = new RangerAccessRequestImpl(hiveResource, RangerPolicyEngine.ANY_ACCESS, null, null);
 
 		ret = hivePlugin.getResourceACLs(request);
@@ -2411,17 +2473,6 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
 		return result;
 	}
 
-	private boolean isTempUDFOperation(String hiveOpTypeName, HivePrivilegeObject hiveObj) {
-		boolean ret = false;
-		if ((hiveOpTypeName.contains("createfunction") || hiveOpTypeName.contains("dropfunction")) &&
-			StringUtils.isEmpty(hiveObj.getDbname())) {
-			// This happens for temp udf function and will use
-			// global resource policy in ranger for auth
-			ret = true;
-		}
-		return ret;
-	}
-
 	private String toString(HiveOperationType         hiveOpType,
 							List<HivePrivilegeObject> inputHObjs,
 							List<HivePrivilegeObject> outputHObjs,
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHivePolicyProvider.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHivePolicyProvider.java
new file mode 100644
index 0000000..3a88c47
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHivePolicyProvider.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.authorization.hive.authorizer;
+
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyChangeListener;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl;
+import org.apache.ranger.plugin.policyengine.RangerPolicyEngine;
+import org.apache.ranger.plugin.policyengine.RangerResourceACLs;
+import org.apache.ranger.plugin.policyevaluator.RangerPolicyEvaluator;
+import org.apache.ranger.plugin.service.RangerAuthContextListener;
+import org.apache.ranger.plugin.service.RangerBasePlugin;
+import org.apache.ranger.plugin.util.RangerPerfTracer;
+
+import javax.validation.constraints.NotNull;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+public class RangerHivePolicyProvider implements HivePolicyProvider {
+    private static final Log LOG = LogFactory.getLog(RangerHivePolicyProvider.class);
+
+    private static final Log PERF_HIVEACLPROVIDER_REQUEST_LOG = RangerPerfTracer.getPerfLogger("hiveACLProvider.request");
+
+	private final RangerHiveAuthContextListener authContextListener = new RangerHiveAuthContextListener();
+
+	private final Set<String> hivePrivileges;
+
+	private final RangerBasePlugin  rangerPlugin;
+
+	RangerHivePolicyProvider(@NotNull RangerHivePlugin hivePlugin) {
+
+		Set<String> privileges = new HashSet<>();
+		for (HiveResourceACLs.Privilege privilege : HiveResourceACLs.Privilege.values()) {
+			privileges.add(privilege.name().toLowerCase());
+		}
+
+		this.hivePrivileges = new HashSet<>(privileges);
+		this.rangerPlugin   = hivePlugin;
+	}
+
+	@Override
+    public HiveResourceACLs getResourceACLs(HivePrivilegeObject hiveObject) {
+
+	    HiveResourceACLs ret;
+
+	    RangerPerfTracer perf = null;
+
+	    if (RangerPerfTracer.isPerfTraceEnabled(PERF_HIVEACLPROVIDER_REQUEST_LOG)) {
+		    perf = RangerPerfTracer.getPerfTracer(PERF_HIVEACLPROVIDER_REQUEST_LOG, "RangerHivePolicyProvider.getResourceACLS()");
+	    }
+	    // Extract and build RangerHiveResource from inputObject
+	    RangerHiveResource hiveResource = RangerHiveAuthorizer.createHiveResource(hiveObject);
+	    ret = getResourceACLs(hiveResource);
+	    RangerPerfTracer.log(perf);
+		return ret;
+    }
+
+	@Override
+	public void registerHivePolicyChangeListener(HivePolicyChangeListener listener) {
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("==> RangerHiveACLProviderFactory.registerACLProviderChangeListener()");
+		}
+		authContextListener.providerChangeListeners.add(listener);
+
+		rangerPlugin.registerAuthContextEventListener(authContextListener);
+
+		if (LOG.isDebugEnabled()) {
+			LOG.debug("<== RangerHiveACLProviderFactory.registerACLProviderChangeListener()");
+		}
+	}
+
+	private HiveResourceACLs getResourceACLs(RangerHiveResource hiveResource) {
+	    HiveResourceACLs ret;
+
+	    RangerAccessRequestImpl request = new RangerAccessRequestImpl(hiveResource, RangerPolicyEngine.ANY_ACCESS, null, null);
+
+	    RangerResourceACLs acls = rangerPlugin.getResourceACLs(request);
+
+	    if (LOG.isDebugEnabled()) {
+	    	LOG.debug("HiveResource:[" + hiveResource.getAsString() + "], Computed ACLS:[" + acls + "]");
+	    }
+
+	    Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> userPermissions = convertRangerACLsToHiveACLs(acls.getUserACLs());
+	    Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> groupPermissions = convertRangerACLsToHiveACLs(acls.getGroupACLs());
+
+	    ret = new RangerHiveResourceACLs(userPermissions, groupPermissions);
+
+	    return ret;
+    }
+
+    private Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> convertRangerACLsToHiveACLs(Map<String, Map<String, RangerResourceACLs.AccessResult>> rangerACLs) {
+
+	    Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> ret = new HashMap<>();
+
+	    if (MapUtils.isNotEmpty(rangerACLs)) {
+
+		    for (Map.Entry<String, Map<String, RangerResourceACLs.AccessResult>> entry : rangerACLs.entrySet()) {
+
+			    Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult> permissions = new HashMap<>();
+
+			    ret.put(entry.getKey(), permissions);
+
+			    for (Map.Entry<String, RangerResourceACLs.AccessResult> permission : entry.getValue().entrySet()) {
+
+				    if (hivePrivileges.contains(permission.getKey())) {
+
+					    HiveResourceACLs.Privilege privilege = HiveResourceACLs.Privilege.valueOf(StringUtils.upperCase(permission.getKey()));
+
+					    HiveResourceACLs.AccessResult accessResult;
+
+					    int rangerResultValue = permission.getValue().getResult();
+
+					    if (rangerResultValue == RangerPolicyEvaluator.ACCESS_ALLOWED) {
+						    accessResult = HiveResourceACLs.AccessResult.ALLOWED;
+					    } else if (rangerResultValue == RangerPolicyEvaluator.ACCESS_DENIED) {
+						    accessResult = HiveResourceACLs.AccessResult.NOT_ALLOWED;
+					    } else if (rangerResultValue == RangerPolicyEvaluator.ACCESS_CONDITIONAL) {
+						    accessResult = HiveResourceACLs.AccessResult.CONDITIONAL_ALLOWED;
+					    } else {
+						    // Should not get here
+						    accessResult = HiveResourceACLs.AccessResult.NOT_ALLOWED;
+					    }
+
+					    permissions.put(privilege, accessResult);
+				    }
+
+			    }
+		    }
+	    }
+
+	    return ret;
+    }
+
+	static class RangerHiveAuthContextListener implements RangerAuthContextListener {
+		Set<HivePolicyChangeListener> providerChangeListeners = new HashSet<>();
+
+		public void contextChanged() {
+			for (HivePolicyChangeListener eventListener : providerChangeListeners) {
+				eventListener.notifyPolicyChange(null);
+			}
+		}
+	}
+}
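
The new provider above is what RangerHiveAuthorizer.getHivePolicyProvider() returns. A minimal sketch of how a caller holding such a provider could look up ACLs for a table (the class and method below are hypothetical, not part of this commit):

import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
import org.apache.ranger.authorization.hive.authorizer.RangerHivePolicyProvider;

// Hypothetical caller, not part of this commit: queries ACLs for a table through the
// provider, which internally builds a RangerHiveResource and asks the Ranger plugin.
final class PolicyProviderUsageSketch {
    static HiveResourceACLs tableAcls(RangerHivePolicyProvider provider, String dbName, String tableName) {
        HivePrivilegeObject table = new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName);
        return provider.getResourceACLs(table);
    }
}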
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResourceACLs.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResourceACLs.java
new file mode 100644
index 0000000..02dfa68
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveResourceACLs.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.authorization.hive.authorizer;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class RangerHiveResourceACLs implements HiveResourceACLs {
+
+	private final Map<String, Map<Privilege, AccessResult>> userPermissions;
+	private final Map<String, Map<Privilege, AccessResult>> groupPermissions;
+
+	RangerHiveResourceACLs() {
+		this(null, null);
+	}
+
+	RangerHiveResourceACLs(Map<String, Map<Privilege, AccessResult>> userPermissions, Map<String, Map<Privilege, AccessResult>> groupPermissions) {
+		this.userPermissions = userPermissions != null ? userPermissions : new HashMap<>();
+		this.groupPermissions = groupPermissions != null ? groupPermissions : new HashMap<>();
+	}
+
+	@Override
+	public Map<String, Map<Privilege, AccessResult>> getUserPermissions() { return userPermissions; }
+
+	@Override
+	public Map<String, Map<Privilege, AccessResult>> getGroupPermissions() { return groupPermissions; }
+}
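
RangerHiveResourceACLs above is a simple holder for the converted permission maps. A short sketch of reading them back through the HiveResourceACLs interface (the consumer class below is hypothetical, not part of this commit):

import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;

import java.util.Map;

// Hypothetical consumer, not part of this commit: dumps the per-user privilege map
// exposed by HiveResourceACLs (e.g. a RangerHiveResourceACLs instance).
final class ResourceAclsDumpSketch {
    static void dumpUserPermissions(HiveResourceACLs acls) {
        for (Map.Entry<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> entry
                : acls.getUserPermissions().entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}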
diff --git a/pom.xml b/pom.xml
index bce4f20..cb04757 100644
--- a/pom.xml
+++ b/pom.xml
@@ -120,7 +120,7 @@
         <ozone.version>0.4.0-alpha</ozone.version>
         <hamcrest.all.version>1.3</hamcrest.all.version>
         <hbase.version>2.0.2</hbase.version>
-        <hive.version>3.1.0</hive.version>
+        <hive.version>3.1.2</hive.version>
         <hbase-shaded-protobuf>2.0.0</hbase-shaded-protobuf>
         <hbase-shaded-netty>2.0.0</hbase-shaded-netty>
         <hbase-shaded-miscellaneous>2.0.0</hbase-shaded-miscellaneous>