You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by da...@apache.org on 2018/05/29 19:06:01 UTC

[7/7] hive git commit: HIVE-19440: Make StorageBasedAuthorizer work with information schema (Daniel Dai, reviewed by Thejas Nair)

HIVE-19440: Make StorageBasedAuthorizer work with information schema (Daniel Dai, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/83afdb4d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/83afdb4d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/83afdb4d

Branch: refs/heads/master
Commit: 83afdb4d52d8ee9c6ac4006a1808233609c85298
Parents: 2811d0a
Author: Daniel Dai <da...@gmail.com>
Authored: Tue May 29 12:05:29 2018 -0700
Committer: Daniel Dai <da...@gmail.com>
Committed: Tue May 29 12:05:29 2018 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |    2 -
 .../storagehandler/DummyHCatAuthProvider.java   |    7 +
 .../listener/DummyRawStoreFailEvent.java        |    4 +-
 .../TestHDFSPermissionPolicyProvider.java       |  189 ++
 .../apache/hive/jdbc/TestRestrictedList.java    |    1 -
 .../TestInformationSchemaWithPrivilege.java     |   22 +-
 ...DummyHiveMetastoreAuthorizationProvider.java |    8 +-
 .../jdbc/dao/DatabaseAccessorFactory.java       |    3 +-
 .../scripts/upgrade/derby/upgrade.order.derby   |    1 +
 .../upgrade/hive/hive-schema-3.0.0.hive.sql     |   41 +-
 .../scripts/upgrade/mssql/upgrade.order.mssql   |    1 +
 .../scripts/upgrade/mysql/upgrade.order.mysql   |    1 +
 .../scripts/upgrade/oracle/upgrade.order.oracle |    1 +
 .../upgrade/postgres/upgrade.order.postgres     |    1 +
 pom.xml                                         |    2 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |    1 +
 .../ql/metadata/SessionHiveMetaStoreClient.java |    2 +-
 .../HDFSPermissionPolicyProvider.java           |  120 ++
 .../HiveAuthorizationProviderBase.java          |    6 +
 .../HiveMetastoreAuthorizationProvider.java     |    7 +
 .../authorization/PolicyProviderContainer.java  |   77 +
 .../authorization/PrivilegeSynchonizer.java     |   70 +-
 .../StorageBasedAuthorizationProvider.java      |    7 +
 .../authorization/plugin/HiveV1Authorizer.java  |   18 +-
 .../plugin/sqlstd/SQLAuthorizationUtils.java    |    2 +-
 .../generic/GenericUDFCurrentAuthorizer.java    |  120 ++
 .../GenericUDFRestrictInformationSchema.java    |   16 +-
 .../clientpositive/llap/resourceplan.q.out      |   78 +-
 .../results/clientpositive/show_functions.q.out |    2 +
 .../apache/hive/service/server/HiveServer2.java |   37 +-
 standalone-metastore/pom.xml                    |    2 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  |   36 +-
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h    |   29 +-
 .../ThriftHiveMetastore_server.skeleton.cpp     |    2 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |   20 +
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   10 +-
 .../hive/metastore/api/HiveObjectPrivilege.java |  112 +-
 .../hive/metastore/api/ThriftHiveMetastore.java |  142 +-
 .../gen-php/metastore/ThriftHiveMetastore.php   |   35 +-
 .../src/gen/thrift/gen-php/metastore/Types.php  |   23 +
 .../hive_metastore/ThriftHiveMetastore-remote   |    8 +-
 .../hive_metastore/ThriftHiveMetastore.py       |   32 +-
 .../gen/thrift/gen-py/hive_metastore/ttypes.py  |   15 +-
 .../gen/thrift/gen-rb/hive_metastore_types.rb   |    4 +-
 .../gen/thrift/gen-rb/thrift_hive_metastore.rb  |   14 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |    6 +-
 .../hive/metastore/HiveMetaStoreClient.java     |    4 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java |    3 +-
 .../hadoop/hive/metastore/ObjectStore.java      |  386 ++--
 .../apache/hadoop/hive/metastore/RawStore.java  |    2 +-
 .../hive/metastore/cache/CachedStore.java       |    4 +-
 .../builder/HiveObjectPrivilegeBuilder.java     |    8 +-
 .../hive/metastore/model/MDBPrivilege.java      |   12 +-
 .../hive/metastore/model/MGlobalPrivilege.java  |   12 +-
 .../model/MPartitionColumnPrivilege.java        |   12 +-
 .../metastore/model/MPartitionPrivilege.java    |   12 +-
 .../metastore/model/MTableColumnPrivilege.java  |   12 +-
 .../hive/metastore/model/MTablePrivilege.java   |   12 +-
 .../src/main/resources/package.jdo              |   24 +
 .../main/sql/derby/hive-schema-3.1.0.derby.sql  |  692 +++++++
 .../sql/derby/upgrade-3.0.0-to-3.1.0.derby.sql  |   28 +
 .../src/main/sql/derby/upgrade.order.derby      |    1 +
 .../main/sql/mssql/hive-schema-3.1.0.mssql.sql  | 1252 ++++++++++++
 .../sql/mssql/upgrade-3.0.0-to-3.1.0.mssql.sql  |   30 +
 .../src/main/sql/mssql/upgrade.order.mssql      |    1 +
 .../main/sql/mysql/hive-schema-3.1.0.mysql.sql  | 1190 ++++++++++++
 .../sql/mysql/upgrade-3.0.0-to-3.1.0.mysql.sql  |   30 +
 .../src/main/sql/mysql/upgrade.order.mysql      |    1 +
 .../sql/oracle/hive-schema-3.1.0.oracle.sql     | 1147 +++++++++++
 .../oracle/upgrade-3.0.0-to-3.1.0.oracle.sql    |   31 +
 .../src/main/sql/oracle/upgrade.order.oracle    |    1 +
 .../sql/postgres/hive-schema-3.1.0.postgres.sql | 1835 ++++++++++++++++++
 .../upgrade-3.0.0-to-3.1.0.postgres.sql         |   33 +
 .../main/sql/postgres/upgrade.order.postgres    |    1 +
 .../src/main/thrift/hive_metastore.thrift       |    3 +-
 .../DummyRawStoreControlledCommit.java          |    4 +-
 .../DummyRawStoreForJdoConnection.java          |    2 +-
 .../HiveMetaStoreClientPreCatalog.java          |    4 +-
 78 files changed, 7791 insertions(+), 335 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7942608..3295d1d 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2977,8 +2977,6 @@ public class HiveConf extends Configuration {
     HIVE_SSL_PROTOCOL_BLACKLIST("hive.ssl.protocol.blacklist", "SSLv2,SSLv3",
         "SSL Versions to disable for all Hive Servers"),
 
-    HIVE_PRIVILEGE_SYNCHRONIZER("hive.privilege.synchronizer", false,
-        "Synchronize privileges from external authorizer such as ranger to Hive periodically in HS2"),
     HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL("hive.privilege.synchronizer.interval",
         "1800s", new TimeValidator(TimeUnit.SECONDS),
         "Interval to synchronize privileges from external authorizer periodically in HS2"),

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
index a53028f..86d9a18 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/storagehandler/DummyHCatAuthProvider.java
@@ -30,6 +30,8 @@ import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 
 /**
  * This class is a dummy implementation of HiveAuthorizationProvider to provide
@@ -141,4 +143,9 @@ class DummyHCatAuthProvider implements HiveAuthorizationProvider {
     throws HiveException, AuthorizationException {
   }
 
+  @Override
+  public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+    return null;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
index 3d6fda6..0cc0ae5 100644
--- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
+++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/listener/DummyRawStoreFailEvent.java
@@ -544,9 +544,9 @@ public class DummyRawStoreFailEvent implements RawStore, Configurable {
   }
 
   @Override
-  public boolean refreshPrivileges(HiveObjectRef objToRefresh, PrivilegeBag grantPrivileges)
+  public boolean refreshPrivileges(HiveObjectRef objToRefresh, String authorizer, PrivilegeBag grantPrivileges)
       throws InvalidObjectException, MetaException, NoSuchObjectException {
-    return objectStore.refreshPrivileges(objToRefresh, grantPrivileges);
+    return objectStore.refreshPrivileges(objToRefresh, authorizer, grantPrivileges);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestHDFSPermissionPolicyProvider.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestHDFSPermissionPolicyProvider.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestHDFSPermissionPolicyProvider.java
new file mode 100644
index 0000000..be2a39e
--- /dev/null
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/TestHDFSPermissionPolicyProvider.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.security.authorization.HDFSPermissionPolicyProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test cases for the privilege synchronizer with a storage-based authorizer.
+ */
+public class TestHDFSPermissionPolicyProvider {
+  private static MiniDFSCluster mDfs;
+  private static IMetaStoreClient client;
+  private static Configuration conf;
+  private static String defaultTbl1Loc, defaultTbl2Loc, db1Loc, db1Tbl1Loc;
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    mDfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).format(true).build();
+    conf = new Configuration();
+    conf.set("fs.defaultFS", "hdfs://" + mDfs.getNameNode().getHostAndPort());
+    String warehouseLocation = "hdfs://" + mDfs.getNameNode().getHostAndPort()
+        + MetastoreConf.ConfVars.WAREHOUSE.getDefaultVal();
+    conf.set(MetastoreConf.ConfVars.WAREHOUSE.getVarname(), warehouseLocation);
+    conf.set(MetastoreConf.ConfVars.AUTO_CREATE_ALL.getVarname(), "true");
+    conf.set(MetastoreConf.ConfVars.SCHEMA_VERIFICATION.getVarname(), "false");
+    client = Hive.get(conf, TestHDFSPermissionPolicyProvider.class).getMSC();
+
+    try {
+      client.dropTable("default", "tbl1");
+    } catch (Exception e) {
+    }
+    try {
+      client.dropTable("default", "tbl2");
+    } catch (Exception e) {
+    }
+    try {
+      client.dropTable("db1", "tbl1");
+    } catch (Exception e) {
+    }
+    try {
+      client.dropDatabase("db1");
+    } catch (Exception e) {
+    }
+
+    defaultTbl1Loc = warehouseLocation + "/tbl1";
+    defaultTbl2Loc = warehouseLocation + "/tbl2";
+    db1Loc = warehouseLocation + "/db1";
+    db1Tbl1Loc = warehouseLocation + "/db1/tbl1";
+
+    int now = (int)System.currentTimeMillis() / 1000;
+    FieldSchema col1 = new FieldSchema("col1", "int", "no comment");
+    List<FieldSchema> cols = new ArrayList<FieldSchema>();
+    cols.add(col1);
+    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
+    StorageDescriptor sd =
+        new StorageDescriptor(cols, defaultTbl1Loc, "input", "output", false, 0, serde, null, null,
+            new HashMap<String, String>());
+    Table tbl =
+        new Table("tbl1", "default", "foo", now, now, 0, sd, null,
+            new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(tbl);
+
+    sd = new StorageDescriptor(cols, defaultTbl2Loc, "input", "output", false, 0, serde,
+        null, null, new HashMap<String, String>());
+    tbl = new Table("tbl2", "default", "foo", now, now, 0, sd, null,
+            new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(tbl);
+
+    Database db = new Database("db1", "no description", db1Loc, new HashMap<String, String>());
+    client.createDatabase(db);
+
+    sd = new StorageDescriptor(cols, db1Tbl1Loc, "input", "output", false, 0, serde, null, null,
+            new HashMap<String, String>());
+    tbl = new Table("tbl1", "db1", "foo", now, now, 0, sd, null,
+            new HashMap<String, String>(), null, null, TableType.MANAGED_TABLE.toString());
+    client.createTable(tbl);
+  }
+
+  @Test
+  public void testPolicyProvider() throws Exception {
+    HDFSPermissionPolicyProvider policyProvider = new HDFSPermissionPolicyProvider(conf);
+    FileSystem fs = FileSystem.get(conf);
+    fs.setOwner(new Path(defaultTbl1Loc), "user1", "group1");
+    fs.setOwner(new Path(defaultTbl2Loc), "user1", "group1");
+    fs.setOwner(new Path(db1Loc), "user1", "group1");
+    fs.setOwner(new Path(db1Tbl1Loc), "user1", "group1");
+    fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("444")); // r--r--r--
+    HiveResourceACLs acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 2);
+    assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
+    assertTrue(acls.getGroupPermissions().keySet().contains("public"));
+
+    fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("440")); // r--r-----
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertEquals(acls.getUserPermissions().keySet().iterator().next(), "user1");
+    assertEquals(acls.getGroupPermissions().size(), 1);
+    assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
+
+    fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("404")); // r-----r--
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 1);
+    assertTrue(acls.getGroupPermissions().keySet().contains("public"));
+
+    fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("400")); // r--------
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 0);
+
+    fs.setPermission(new Path(defaultTbl1Loc), new FsPermission("004")); // ------r--
+    fs.setPermission(new Path(defaultTbl2Loc), new FsPermission("777")); // rwxrwxrwx
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 0);
+    assertEquals(acls.getGroupPermissions().size(), 1);
+    assertTrue(acls.getGroupPermissions().keySet().contains("public"));
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "default", "tbl2"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 2);
+    assertTrue(acls.getGroupPermissions().keySet().contains("group1"));
+    assertTrue(acls.getGroupPermissions().keySet().contains("public"));
+
+    fs.setPermission(new Path(db1Loc), new FsPermission("400")); // r--------
+    fs.delete(new Path(db1Tbl1Loc), true);
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, "db1", null));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 0);
+    acls = policyProvider.getResourceACLs(
+        new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "db1", "tbl1"));
+    assertEquals(acls.getUserPermissions().size(), 1);
+    assertTrue(acls.getUserPermissions().keySet().contains("user1"));
+    assertEquals(acls.getGroupPermissions().size(), 0);
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
index 6270e14..cb005bf 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestRestrictedList.java
@@ -107,7 +107,6 @@ public class TestRestrictedList {
     addToExpectedRestrictedMap("_hive.hdfs.session.path");
     addToExpectedRestrictedMap("hive.spark.client.rpc.server.address");
     addToExpectedRestrictedMap("spark.home");
-    addToExpectedRestrictedMap("hive.privilege.synchronizer");
     addToExpectedRestrictedMap("hive.privilege.synchronizer.interval");
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java
index f49fbed..ccacb00 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/TestInformationSchemaWithPrivilege.java
@@ -46,8 +46,8 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLsImpl;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.DummyHiveAuthorizationValidator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessControllerWrapper;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizationValidator;
 import org.apache.hive.beeline.BeeLine;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.apache.hive.service.cli.CLIServiceClient;
@@ -171,18 +171,7 @@ public class TestInformationSchemaWithPrivilege {
       SQLStdHiveAccessControllerWrapper privilegeManager = new SQLStdHiveAccessControllerWrapper(metastoreClientFactory,
           conf, authenticator, ctx);
       return new HiveAuthorizerImplWithPolicyProvider(privilegeManager,
-          new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator, privilegeManager, ctx));
-    }
-  }
-
-  static class TestHiveAuthorizerNullPolicyProviderFactory implements HiveAuthorizerFactory {
-    @Override
-    public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, HiveConf conf,
-        HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
-      SQLStdHiveAccessControllerWrapper privilegeManager = new SQLStdHiveAccessControllerWrapper(metastoreClientFactory,
-          conf, authenticator, ctx);
-      return new HiveAuthorizerImplWithNullPolicyProvider(privilegeManager,
-          new SQLStdHiveAuthorizationValidator(metastoreClientFactory, conf, authenticator, privilegeManager, ctx));
+          new DummyHiveAuthorizationValidator());
     }
   }
 
@@ -208,7 +197,6 @@ public class TestInformationSchemaWithPrivilege {
         + File.separator + "mapred" + File.separator + "staging");
     confOverlay.put("mapred.temp.dir", workDir + File.separator + "TestInformationSchemaWithPrivilege"
         + File.separator + "mapred" + File.separator + "temp");
-    confOverlay.put(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER.varname, "true");
     confOverlay.put(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL.varname, "1");
     confOverlay.put(ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY.varname, "true");
     confOverlay.put(ConfVars.HIVE_AUTHORIZATION_MANAGER.varname, TestHiveAuthorizerFactory.class.getName());
@@ -216,6 +204,8 @@ public class TestInformationSchemaWithPrivilege {
     confOverlay.put(ConfVars.HIVE_ZOOKEEPER_CLIENT_PORT.varname, Integer.toString(zkPort));
     confOverlay.put(MetastoreConf.ConfVars.AUTO_CREATE_ALL.getVarname(), "true");
     confOverlay.put(ConfVars.HIVE_AUTHENTICATOR_MANAGER.varname, FakeGroupAuthenticator.class.getName());
+    confOverlay.put(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "true");
+    confOverlay.put(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST.varname, ".*");
     miniHS2.start(confOverlay);
   }
 
@@ -585,9 +575,7 @@ public class TestInformationSchemaWithPrivilege {
     serviceClient.closeSession(sessHandle);
 
     // Revert hive.server2.restrict_information_schema to false
-    miniHS2.getHiveConf().set(ConfVars.HIVE_AUTHORIZATION_MANAGER.varname,
-        TestHiveAuthorizerNullPolicyProviderFactory.class.getName());
-    miniHS2.getHiveConf().unset(MetastoreConf.ConfVars.PRE_EVENT_LISTENERS.getVarname());
+    miniHS2.getHiveConf().set(ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, "false");
 
     sessHandle = serviceClient.openSession("user1", "");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
index 31e795c..3fdacac 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/security/DummyHiveMetastoreAuthorizationProvider.java
@@ -32,6 +32,8 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 
 /**
  * Dummy implementation for use by unit tests. Tracks the context of calls made to
@@ -211,6 +213,8 @@ public class DummyHiveMetastoreAuthorizationProvider implements HiveMetastoreAut
     authCalls.add(new AuthCallContext(AuthCallContextType.AUTHORIZATION, null, null));
   }
 
-
-
+  @Override
+  public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+    return null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
index fffe0df..692cb23 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/dao/DatabaseAccessorFactory.java
@@ -61,7 +61,8 @@ public class DatabaseAccessorFactory {
 
 
   public static DatabaseAccessor getAccessor(Configuration conf) {
-    DatabaseType dbType = DatabaseType.valueOf(conf.get(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()));
+    DatabaseType dbType = DatabaseType.valueOf(
+        conf.get(JdbcStorageConfig.DATABASE_TYPE.getPropertyName()).toUpperCase());
     return getAccessor(dbType);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/derby/upgrade.order.derby
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/derby/upgrade.order.derby b/metastore/scripts/upgrade/derby/upgrade.order.derby
index d7091b5..f43da9a 100644
--- a/metastore/scripts/upgrade/derby/upgrade.order.derby
+++ b/metastore/scripts/upgrade/derby/upgrade.order.derby
@@ -14,3 +14,4 @@
 2.1.0-to-2.2.0
 2.2.0-to-2.3.0
 2.3.0-to-3.0.0
+3.0.0-to-3.1.0

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
index d9606d8..a3ecded 100644
--- a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
@@ -109,6 +109,7 @@ CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
   `PRINCIPAL_NAME` string,
   `PRINCIPAL_TYPE` string,
   `DB_PRIV` string,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_DB_PRIVS` PRIMARY KEY (`DB_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -124,7 +125,8 @@ TBLPROPERTIES (
   \"GRANTOR_TYPE\",
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
-  \"DB_PRIV\"
+  \"DB_PRIV\",
+  \"AUTHORIZER\"
 FROM
   \"DB_PRIVS\""
 );
@@ -138,6 +140,7 @@ CREATE TABLE IF NOT EXISTS `GLOBAL_PRIVS` (
   `PRINCIPAL_NAME` string,
   `PRINCIPAL_TYPE` string,
   `USER_PRIV` string,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_GLOBAL_PRIVS` PRIMARY KEY (`USER_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -152,7 +155,8 @@ TBLPROPERTIES (
   \"GRANTOR_TYPE\",
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
-  \"USER_PRIV\"
+  \"USER_PRIV\",
+  \"AUTHORIZER\"
 FROM
   \"GLOBAL_PRIVS\""
 );
@@ -250,6 +254,7 @@ CREATE TABLE IF NOT EXISTS `PART_COL_PRIVS` (
   `PRINCIPAL_NAME` string,
   `PRINCIPAL_TYPE` string,
   `PART_COL_PRIV` string,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_PART_COL_PRIVS` PRIMARY KEY (`PART_COLUMN_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -266,7 +271,8 @@ TBLPROPERTIES (
   \"PART_ID\",
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
-  \"PART_COL_PRIV\"
+  \"PART_COL_PRIV\",
+  \"AUTHORIZER\"
 FROM
   \"PART_COL_PRIVS\""
 );
@@ -281,6 +287,7 @@ CREATE TABLE IF NOT EXISTS `PART_PRIVS` (
   `PRINCIPAL_NAME` string,
   `PRINCIPAL_TYPE` string,
   `PART_PRIV` string,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_PART_PRIVS` PRIMARY KEY (`PART_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -296,7 +303,8 @@ TBLPROPERTIES (
   \"PART_ID\",
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
-  \"PART_PRIV\"
+  \"PART_PRIV\",
+  \"AUTHORIZER\"
 FROM
   \"PART_PRIVS\""
 );
@@ -652,6 +660,7 @@ CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
   `PRINCIPAL_TYPE` string,
   `TBL_COL_PRIV` string,
   `TBL_ID` bigint,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_TBL_COL_PRIVS` PRIMARY KEY (`TBL_COLUMN_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -668,7 +677,8 @@ TBLPROPERTIES (
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
   \"TBL_COL_PRIV\",
-  \"TBL_ID\"
+  \"TBL_ID\",
+  \"AUTHORIZER\"
 FROM
   \"TBL_COL_PRIVS\""
 );
@@ -683,6 +693,7 @@ CREATE TABLE IF NOT EXISTS `TBL_PRIVS` (
   `PRINCIPAL_TYPE` string,
   `TBL_PRIV` string,
   `TBL_ID` bigint,
+  `AUTHORIZER` string,
   CONSTRAINT `SYS_PK_TBL_PRIVS` PRIMARY KEY (`TBL_GRANT_ID`) DISABLE
 )
 STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
@@ -698,7 +709,8 @@ TBLPROPERTIES (
   \"PRINCIPAL_NAME\",
   \"PRINCIPAL_TYPE\",
   \"TBL_PRIV\",
-  \"TBL_ID\"
+  \"TBL_ID\",
+  \"AUTHORIZER\"
 FROM
   \"TBL_PRIVS\""
 );
@@ -1082,7 +1094,8 @@ WHERE
   D.`DB_ID` = T.`DB_ID`
   AND T.`TBL_ID` = P.`TBL_ID`
   AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'));
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND current_authorizer() = P.`AUTHORIZER`;
 
 CREATE VIEW IF NOT EXISTS `TABLES`
 (
@@ -1118,8 +1131,8 @@ WHERE
   D.`DB_ID` = T.`DB_ID`
   AND (NOT restrict_information_schema() OR T.`TBL_ID` = P.`TBL_ID`
   AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT');
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP')))
+  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer();
 
 CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
 (
@@ -1152,8 +1165,8 @@ WHERE
   AND (NOT restrict_information_schema() OR
   P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
   AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
-    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT');
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP')))
+  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER` = current_authorizer() AND P2.`AUTHORIZER` = current_authorizer();
 
 CREATE VIEW IF NOT EXISTS `COLUMNS`
 (
@@ -1308,7 +1321,7 @@ WHERE
   AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
   AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
     OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_COL_PRIV`='SELECT');
+  AND P.`TBL_COL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer());
 
 CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
 (
@@ -1344,7 +1357,7 @@ WHERE
   P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
   AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
     OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
-  AND P2.`TBL_PRIV`='SELECT');
+  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer() AND P2.`AUTHORIZER`=current_authorizer());
 
 CREATE VIEW IF NOT EXISTS `VIEWS`
 (
@@ -1381,4 +1394,4 @@ WHERE
   T.`TBL_ID` = P.`TBL_ID`
   AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
     OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
-  AND P.`TBL_PRIV`='SELECT');
+  AND P.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer());

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/mssql/upgrade.order.mssql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mssql/upgrade.order.mssql b/metastore/scripts/upgrade/mssql/upgrade.order.mssql
index 8623683..5572c26 100644
--- a/metastore/scripts/upgrade/mssql/upgrade.order.mssql
+++ b/metastore/scripts/upgrade/mssql/upgrade.order.mssql
@@ -8,3 +8,4 @@
 2.1.0-to-2.2.0
 2.2.0-to-2.3.0
 2.3.0-to-3.0.0
+3.0.0-to-3.1.0

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/mysql/upgrade.order.mysql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/mysql/upgrade.order.mysql b/metastore/scripts/upgrade/mysql/upgrade.order.mysql
index d7091b5..f43da9a 100644
--- a/metastore/scripts/upgrade/mysql/upgrade.order.mysql
+++ b/metastore/scripts/upgrade/mysql/upgrade.order.mysql
@@ -14,3 +14,4 @@
 2.1.0-to-2.2.0
 2.2.0-to-2.3.0
 2.3.0-to-3.0.0
+3.0.0-to-3.1.0

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/oracle/upgrade.order.oracle
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/oracle/upgrade.order.oracle b/metastore/scripts/upgrade/oracle/upgrade.order.oracle
index a18b062..72b8303 100644
--- a/metastore/scripts/upgrade/oracle/upgrade.order.oracle
+++ b/metastore/scripts/upgrade/oracle/upgrade.order.oracle
@@ -10,3 +10,4 @@
 2.1.0-to-2.2.0
 2.2.0-to-2.3.0
 2.3.0-to-3.0.0
+3.0.0-to-3.1.0

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/metastore/scripts/upgrade/postgres/upgrade.order.postgres
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/postgres/upgrade.order.postgres b/metastore/scripts/upgrade/postgres/upgrade.order.postgres
index d7091b5..f43da9a 100644
--- a/metastore/scripts/upgrade/postgres/upgrade.order.postgres
+++ b/metastore/scripts/upgrade/postgres/upgrade.order.postgres
@@ -14,3 +14,4 @@
 2.1.0-to-2.2.0
 2.2.0-to-2.3.0
 2.3.0-to-3.0.0
+3.0.0-to-3.1.0

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 5f124f8..31960ac 100644
--- a/pom.xml
+++ b/pom.xml
@@ -65,7 +65,7 @@
   </modules>
 
   <properties>
-    <hive.version.shortname>3.0.0</hive.version.shortname>
+    <hive.version.shortname>3.1.0</hive.version.shortname>
 
     <!-- Build Properties -->
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index a1f549a..e77fe18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -359,6 +359,7 @@ public final class FunctionRegistry {
     system.registerGenericUDF("current_groups", GenericUDFCurrentGroups.class);
     system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class);
     system.registerGenericUDF("restrict_information_schema", GenericUDFRestrictInformationSchema.class);
+    system.registerGenericUDF("current_authorizer", GenericUDFCurrentAuthorizer.class);
 
     system.registerGenericUDF("isnull", GenericUDFOPNull.class);
     system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 1c516f2..209fdfb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -709,7 +709,7 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
   private static Map<String, Map<String, Table>> getTempTables(String msg) {
     SessionState ss = SessionState.get();
     if (ss == null) {
-      LOG.warn("No current SessionState, skipping temp tables for " + msg);
+      LOG.debug("No current SessionState, skipping temp tables for " + msg);
       return Collections.emptyMap();
     }
     return ss.getTempTables();

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HDFSPermissionPolicyProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HDFSPermissionPolicyProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HDFSPermissionPolicyProvider.java
new file mode 100644
index 0000000..2080054
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HDFSPermissionPolicyProvider.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyChangeListener;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLsImpl;
+
+/**
+ * PolicyProvider for storage based authorizer based on hdfs permission string
+ */
+public class HDFSPermissionPolicyProvider implements HivePolicyProvider {
+
+  private Configuration conf;
+
+  public HDFSPermissionPolicyProvider(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public HiveResourceACLs getResourceACLs(HivePrivilegeObject hiveObject) {
+    HiveResourceACLs acls = null;
+    try {
+      switch (hiveObject.getType()) {
+      case DATABASE:
+        Database db = Hive.get().getDatabase(hiveObject.getDbname());
+        acls = getResourceACLs(new Path(db.getLocationUri()));
+        break;
+      case TABLE_OR_VIEW:
+      case COLUMN:
+        Table table = Hive.get().getTable(hiveObject.getDbname(), hiveObject.getObjectName());
+        acls = getResourceACLs(new Path(table.getTTable().getSd().getLocation()));
+        break;
+      default:
+        // Shall never happen
+        throw new RuntimeException("Unknown request type:" + hiveObject.getType());
+      }
+    } catch (Exception e) {
+    }
+    return acls;
+  }
+
+  private HiveResourceACLs getResourceACLs(Path path) throws IOException {
+    if (path == null) {
+      throw new IllegalArgumentException("path is null");
+    }
+
+    final FileSystem fs = path.getFileSystem(conf);
+
+    FileStatus pathStatus = FileUtils.getFileStatusOrNull(fs, path);
+    if (pathStatus != null) {
+      return getResourceACLs(fs, pathStatus);
+    } else if (path.getParent() != null) {
+      // find the ancestor which exists to check its permissions
+      Path par = path.getParent();
+      FileStatus parStatus = null;
+      while (par != null) {
+        parStatus = FileUtils.getFileStatusOrNull(fs, par);
+        if (parStatus != null) {
+          break;
+        }
+        par = par.getParent();
+      }
+      return getResourceACLs(fs, parStatus);
+    }
+    return null;
+  }
+
+  private HiveResourceACLs getResourceACLs(final FileSystem fs, final FileStatus stat) {
+    String owner = stat.getOwner();
+    String group = stat.getGroup();
+    HiveResourceACLsImpl acls = new HiveResourceACLsImpl();
+    FsPermission permission = stat.getPermission();
+    if (permission.getUserAction().implies(FsAction.READ)) {
+      acls.addUserEntry(owner, HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED);
+    }
+    if (permission.getGroupAction().implies(FsAction.READ)) {
+      acls.addGroupEntry(group, HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED);
+    }
+    if (permission.getOtherAction().implies(FsAction.READ)) {
+      acls.addGroupEntry("public", HiveResourceACLs.Privilege.SELECT, HiveResourceACLs.AccessResult.ALLOWED);
+    }
+    return acls;
+  }
+
+  @Override
+  public void registerHivePolicyChangeListener(HivePolicyChangeListener listener) {
+    // Not implemented
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
index 8a7c06d..d3e13a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
@@ -36,6 +36,8 @@ import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 import org.apache.thrift.TException;
 
 public abstract class HiveAuthorizationProviderBase implements
@@ -133,4 +135,8 @@ public abstract class HiveAuthorizationProviderBase implements
     this.authenticator = authenticator;
   }
 
+  @Override
+  public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+    return null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
index 0dab334..de9b8d1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveMetastoreAuthorizationProvider.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.security.authorization;
 import org.apache.hadoop.hive.metastore.IHMSHandler;
 import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 
 /**
  * HiveMetastoreAuthorizationProvider : An extension of HiveAuthorizaytionProvider
@@ -44,5 +46,10 @@ public interface HiveMetastoreAuthorizationProvider extends HiveAuthorizationPro
    */
   void authorizeAuthorizationApiInvocation() throws HiveException, AuthorizationException;
 
+  /**
+   * @return HivePolicyProvider instance (expected to be a singleton)
+   * @throws HiveAuthzPluginException
+   */
+  HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException;
 
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PolicyProviderContainer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PolicyProviderContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PolicyProviderContainer.java
new file mode 100644
index 0000000..51a4cd7
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PolicyProviderContainer.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
+
+/**
+ * Wrapper of policy provider no matter this is in authorizer v1 or v2
+ */
+public class PolicyProviderContainer implements Iterable<HivePolicyProvider> {
+  List<HiveAuthorizer> authorizers = new ArrayList<HiveAuthorizer>();
+  List<HiveMetastoreAuthorizationProvider> authorizationProviders = new ArrayList<HiveMetastoreAuthorizationProvider>();
+
+  public void addAuthorizer(HiveAuthorizer authorizer) {
+    authorizers.add(authorizer);
+  }
+
+  public void addAuthorizationProvider(HiveMetastoreAuthorizationProvider authorizationProvider) {
+    authorizationProviders.add(authorizationProvider);
+  }
+
+  public int size() {
+    return authorizers.size() + authorizationProviders.size();
+  }
+
+  @Override
+  public Iterator<HivePolicyProvider> iterator() {
+    return new PolicyIterator();
+  }
+
+  class PolicyIterator implements Iterator<HivePolicyProvider> {
+    int currentAuthorizerPosition = 0;
+    int authorizationProviderPosition = 0;
+    @Override
+    public boolean hasNext() {
+      if (currentAuthorizerPosition < authorizers.size()
+          || authorizationProviderPosition < authorizationProviders.size()) {
+        return true;
+      }
+      return false;
+    }
+
+    @Override
+    public HivePolicyProvider next() {
+      try {
+        if (currentAuthorizerPosition < authorizers.size()) {
+          return authorizers.get(currentAuthorizerPosition++).getHivePolicyProvider();
+        } else {
+          return authorizationProviders.get(authorizationProviderPosition++).getHivePolicyProvider();
+        }
+      } catch (HiveAuthzPluginException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
index 9b2e6cd..e56094e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
@@ -23,6 +23,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.curator.framework.recipes.leader.LeaderLatch;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
@@ -32,9 +33,8 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
@@ -53,23 +53,26 @@ public class PrivilegeSynchonizer implements Runnable {
   private IMetaStoreClient hiveClient;
   private LeaderLatch privilegeSynchonizerLatch;
   private HiveConf hiveConf;
-  private HiveAuthorizer authorizer;
+  private PolicyProviderContainer policyProviderContainer;
 
-  public PrivilegeSynchonizer(LeaderLatch privilegeSynchonizerLatch, HiveAuthorizer authorizer, HiveConf hiveConf) {
+  public PrivilegeSynchonizer(LeaderLatch privilegeSynchonizerLatch,
+      PolicyProviderContainer policyProviderContainer, HiveConf hiveConf) {
+    this.hiveConf = new HiveConf(hiveConf);
+    this.hiveConf.set(MetastoreConf.ConfVars.FILTER_HOOK.getVarname(), DefaultMetaStoreFilterHookImpl.class.getName());
     try {
-      hiveClient = new HiveMetastoreClientFactoryImpl().getHiveMetastoreClient();
-    } catch (HiveAuthzPluginException e) {
-      throw new RuntimeException("Error creating getHiveMetastoreClient", e);
+      hiveClient = Hive.get(this.hiveConf).getMSC();
+    } catch (Exception e) {
+      throw new RuntimeException("Error creating HiveMetastoreClient", e);
     }
     this.privilegeSynchonizerLatch = privilegeSynchonizerLatch;
-    this.authorizer = authorizer;
+    this.policyProviderContainer = policyProviderContainer;
     this.hiveConf = hiveConf;
   }
 
   private void addACLsToBag(
       Map<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> principalAclsMap,
       PrivilegeBag privBag, HiveObjectType objectType, String dbName, String tblName, String columnName,
-      PrincipalType principalType) {
+      PrincipalType principalType, String authorizer) {
 
     for (Map.Entry<String, Map<HiveResourceACLs.Privilege, HiveResourceACLs.AccessResult>> principalAcls
         : principalAclsMap.entrySet()) {
@@ -82,19 +85,19 @@ public class PrivilegeSynchonizer implements Runnable {
             privBag.addToPrivileges(
                 new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.DATABASE, dbName, null, null, null), principal,
                     principalType, new PrivilegeGrantInfo(acl.getKey().toString(),
-                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false)));
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
             break;
           case TABLE:
             privBag.addToPrivileges(
                 new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.TABLE, dbName, tblName, null, null), principal,
                     principalType, new PrivilegeGrantInfo(acl.getKey().toString(),
-                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false)));
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
             break;
           case COLUMN:
             privBag.addToPrivileges(
                 new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
                     principal, principalType, new PrivilegeGrantInfo(acl.getKey().toString(),
-                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false)));
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
             break;
           default:
             throw new RuntimeException("Get unknown object type " + objectType);
@@ -123,7 +126,7 @@ public class PrivilegeSynchonizer implements Runnable {
   }
 
   private void addGrantPrivilegesToBag(HivePolicyProvider policyProvider, PrivilegeBag privBag, HiveObjectType type,
-      String dbName, String tblName, String columnName) throws Exception {
+      String dbName, String tblName, String columnName, String authorizer) throws Exception {
 
     HiveResourceACLs objectAcls = null;
 
@@ -151,51 +154,56 @@ public class PrivilegeSynchonizer implements Runnable {
       return;
     }
 
-    addACLsToBag(objectAcls.getUserPermissions(), privBag, type, dbName, tblName, columnName, PrincipalType.USER);
-    addACLsToBag(objectAcls.getGroupPermissions(), privBag, type, dbName, tblName, columnName, PrincipalType.GROUP);
+    addACLsToBag(objectAcls.getUserPermissions(), privBag, type, dbName, tblName, columnName,
+        PrincipalType.USER, authorizer);
+    addACLsToBag(objectAcls.getGroupPermissions(), privBag, type, dbName, tblName, columnName,
+        PrincipalType.GROUP, authorizer);
   }
 
   @Override
   public void run() {
     while (true) {
+      long interval = HiveConf.getTimeVar(hiveConf, ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL, TimeUnit.SECONDS);
       try {
-        HivePolicyProvider policyProvider = authorizer.getHivePolicyProvider();
-        long interval = HiveConf.getTimeVar(hiveConf, ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL, TimeUnit.SECONDS);
-        if (hiveConf.getBoolVar(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER)) {
+        for (HivePolicyProvider policyProvider : policyProviderContainer) {
+          String authorizer = policyProvider.getClass().getSimpleName();
           if (!privilegeSynchonizerLatch.await(interval, TimeUnit.SECONDS)) {
             continue;
           }
-          LOG.debug("Start synchonize privilege");
+          LOG.info("Start synchonize privilege");
           for (String dbName : hiveClient.getAllDatabases()) {
             HiveObjectRef dbToRefresh = getObjToRefresh(HiveObjectType.DATABASE, dbName, null);
             PrivilegeBag grantDatabaseBag = new PrivilegeBag();
-            addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, HiveObjectType.DATABASE, dbName, null, null);
-            hiveClient.refresh_privileges(dbToRefresh, grantDatabaseBag);
+            addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, HiveObjectType.DATABASE,
+                dbName, null, null, authorizer);
+            hiveClient.refresh_privileges(dbToRefresh, authorizer, grantDatabaseBag);
 
             for (String tblName : hiveClient.getAllTables(dbName)) {
               HiveObjectRef tableToRefresh = getObjToRefresh(HiveObjectType.TABLE, dbName, tblName);
               PrivilegeBag grantTableBag = new PrivilegeBag();
-              addGrantPrivilegesToBag(policyProvider, grantTableBag, HiveObjectType.TABLE, dbName, tblName, null);
-              hiveClient.refresh_privileges(tableToRefresh, grantTableBag);
+              addGrantPrivilegesToBag(policyProvider, grantTableBag, HiveObjectType.TABLE,
+                  dbName, tblName, null, authorizer);
+              hiveClient.refresh_privileges(tableToRefresh, authorizer, grantTableBag);
 
               HiveObjectRef tableOfColumnsToRefresh = getObjToRefresh(HiveObjectType.COLUMN, dbName, tblName);
               PrivilegeBag grantColumnBag = new PrivilegeBag();
               Table tbl = hiveClient.getTable(dbName, tblName);
               for (FieldSchema fs : tbl.getPartitionKeys()) {
-                addGrantPrivilegesToBag(policyProvider, grantColumnBag, HiveObjectType.COLUMN, dbName, tblName,
-                    fs.getName());
+                addGrantPrivilegesToBag(policyProvider, grantColumnBag, HiveObjectType.COLUMN,
+                    dbName, tblName, fs.getName(), authorizer);
               }
               for (FieldSchema fs : tbl.getSd().getCols()) {
-                addGrantPrivilegesToBag(policyProvider, grantColumnBag, HiveObjectType.COLUMN, dbName, tblName,
-                    fs.getName());
+                addGrantPrivilegesToBag(policyProvider, grantColumnBag, HiveObjectType.COLUMN,
+                    dbName, tblName, fs.getName(), authorizer);
               }
-              hiveClient.refresh_privileges(tableOfColumnsToRefresh, grantColumnBag);
+              hiveClient.refresh_privileges(tableOfColumnsToRefresh, authorizer, grantColumnBag);
             }
           }
+          // Wait if no exception happens, otherwise, retry immediately
         }
-        // Wait if no exception happens, otherwise, retry immediately
         Thread.sleep(interval * 1000);
-        LOG.debug("Success synchonize privilege");
+        LOG.info("Success synchonize privilege");
+
       } catch (Exception e) {
         LOG.error("Error initializing PrivilegeSynchonizer: " + e.getMessage(), e);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index b66d188..f074d39 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -45,6 +45,8 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 
 /**
  * StorageBasedAuthorizationProvider is an implementation of
@@ -491,4 +493,9 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
 
   }
 
+  @Override
+  public HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+    return new HDFSPermissionPolicyProvider(getConf());
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
index 48798d8..c889321 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
@@ -38,7 +38,6 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessController;
@@ -47,6 +46,7 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 public class HiveV1Authorizer extends AbstractHiveAuthorizer {
 
   private final HiveConf conf;
+  static private final String AUTHORIZER = "v1";
 
   public HiveV1Authorizer(HiveConf conf) {
     this.conf = conf;
@@ -77,7 +77,7 @@ public class HiveV1Authorizer extends AbstractHiveAuthorizer {
       HivePrincipal grantor, boolean grantOption)
       throws HiveAuthzPluginException, HiveAccessControlException {
     try {
-      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption);
+      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption, AUTHORIZER);
       grantOrRevokePrivs(principals, privBag, true, grantOption);
     } catch (Exception e) {
       throw new HiveAuthzPluginException(e);
@@ -90,7 +90,7 @@ public class HiveV1Authorizer extends AbstractHiveAuthorizer {
       HivePrincipal grantor, boolean grantOption)
       throws HiveAuthzPluginException, HiveAccessControlException {
     try {
-      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption);
+      PrivilegeBag privBag = toPrivilegeBag(privileges, privObject, grantor, grantOption, AUTHORIZER);
       grantOrRevokePrivs(principals, privBag, false, grantOption);
     } catch (Exception e) {
       throw new HiveAuthzPluginException(e);
@@ -115,7 +115,7 @@ public class HiveV1Authorizer extends AbstractHiveAuthorizer {
   }
 
   private PrivilegeBag toPrivilegeBag(List<HivePrivilege> privileges,
-      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption)
+      HivePrivilegeObject privObject, HivePrincipal grantor, boolean grantOption, String authorizer)
       throws HiveException {
 
     PrivilegeBag privBag = new PrivilegeBag();
@@ -136,7 +136,7 @@ public class HiveV1Authorizer extends AbstractHiveAuthorizer {
         privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
             HiveObjectType.GLOBAL, null, null, null, null), null, null,
             new PrivilegeGrantInfo(priv.getName(), 0, grantor.getName(), grantorType,
-                grantOption)));
+                grantOption), authorizer));
       }
       return privBag;
     }
@@ -186,23 +186,23 @@ public class HiveV1Authorizer extends AbstractHiveAuthorizer {
           privBag.addToPrivileges(new HiveObjectPrivilege(
               new HiveObjectRef(HiveObjectType.COLUMN, dbObj.getName(), tableObj.getTableName(),
                   partValues, columns.get(i)), null, null,
-              new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+              new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
         }
       } else if (tableObj == null) {
         privBag.addToPrivileges(new HiveObjectPrivilege(
             new HiveObjectRef(HiveObjectType.DATABASE, dbObj.getName(), null,
                 null, null), null, null,
-            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
       } else if (partValues == null) {
         privBag.addToPrivileges(new HiveObjectPrivilege(
             new HiveObjectRef(HiveObjectType.TABLE, dbObj.getName(), tableObj.getTableName(),
                 null, null), null, null,
-            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
       } else {
         privBag.addToPrivileges(new HiveObjectPrivilege(
             new HiveObjectRef(HiveObjectType.PARTITION, dbObj.getName(), tableObj.getTableName(),
                 partValues, null), null, null,
-            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption)));
+            new PrivilegeGrantInfo(priv.getName(), 0, grantorName, grantorType, grantOption), authorizer));
       }
     }
     return privBag;

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
index 02ed7aa..e787538 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
@@ -99,7 +99,7 @@ public class SQLAuthorizationUtils {
           grantOption, 0 /*real grant time added by metastore*/);
       for (HivePrincipal principal : hivePrincipals) {
         HiveObjectPrivilege objPriv = new HiveObjectPrivilege(privObj, principal.getName(),
-            AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
+            AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo, "SQL");
         privBag.addToPrivileges(objPriv);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentAuthorizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentAuthorizer.java
new file mode 100644
index 0000000..d178863
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentAuthorizer.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * UDF that reports the current authorizer (the class name of the active policy provider).
+ * Intended for internal usage only. This function is not deterministic, but it is a runtime
+ * constant: the return value is fixed within a query, though it can differ between queries.
+ */
+@UDFType(deterministic = false, runtimeConstant = true)
+@Description(name = "current_authorizer",
+    value = "_FUNC_() - Returns the current authorizer (class name of the authorizer). ")
+@NDV(maxNdv = 1)
+public class GenericUDFCurrentAuthorizer extends GenericUDF {
+  private static final Logger LOG = LoggerFactory.getLogger(GenericUDFCurrentAuthorizer.class.getName());
+  protected Text authorizer;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+    if (arguments.length != 0) {
+      throw new UDFArgumentLengthException(
+          "The function CurrentAuthorizer does not take any arguments, but found " + arguments.length);
+    }
+
+    if (authorizer == null) {
+
+      HiveConf hiveConf = SessionState.getSessionConf();
+      HiveAuthorizer hiveAuthorizer = SessionState.get().getAuthorizerV2();
+      try {
+        if (hiveAuthorizer.getHivePolicyProvider() != null) {
+          authorizer = new Text(hiveAuthorizer.getHivePolicyProvider().getClass().getSimpleName());
+        }
+      } catch (HiveAuthzPluginException e) {
+        LOG.warn("Error getting HivePolicyProvider", e);
+      }
+
+      if (authorizer == null) {
+        // If no authorizer is set, fall back to the metastore authorizer (e.g. StorageBasedAuthorizationProvider)
+        if (MetastoreConf.getVar(hiveConf, MetastoreConf.ConfVars.PRE_EVENT_LISTENERS) != null &&
+            !MetastoreConf.getVar(hiveConf, MetastoreConf.ConfVars.PRE_EVENT_LISTENERS).isEmpty() &&
+            HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER) != null) {
+          List<HiveMetastoreAuthorizationProvider> authorizerProviders;
+          try {
+            authorizerProviders = HiveUtils.getMetaStoreAuthorizeProviderManagers(
+              hiveConf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER,
+              SessionState.get().getAuthenticator());
+            for (HiveMetastoreAuthorizationProvider authProvider : authorizerProviders) {
+              if (authProvider.getHivePolicyProvider() != null) {
+                authorizer = new Text(authProvider.getHivePolicyProvider().getClass().getSimpleName());
+                break;
+              }
+            }
+          } catch (HiveAuthzPluginException e) {
+            LOG.warn("Error getting HivePolicyProvider", e);
+          } catch (HiveException e) {
+            LOG.warn("Error instantiating hive.security.metastore.authorization.manager", e);
+          }
+        }
+      }
+    }
+
+    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return authorizer;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "CURRENT_AUTHORIZER()";
+  }
+
+  @Override
+  public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
+    super.copyToNewInstance(newInstance);
+    // Need to preserve the cached authorizer value in the new instance
+    GenericUDFCurrentAuthorizer other = (GenericUDFCurrentAuthorizer) newInstance;
+    if (this.authorizer != null) {
+      other.authorizer = new Text(this.authorizer);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/83afdb4d/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
index 3eb0914..3635a5a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
@@ -61,13 +61,15 @@ public class GenericUDFRestrictInformationSchema extends GenericUDF {
     }
 
     if (enabled == null) {
+      HiveConf hiveConf = SessionState.getSessionConf();
+
       boolean enableHS2PolicyProvider = false;
       boolean enableMetastorePolicyProvider = false;
 
-      HiveConf hiveConf = SessionState.getSessionConf();
       HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
       try {
-        if (authorizer.getHivePolicyProvider() != null) {
+        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)
+            && authorizer.getHivePolicyProvider() != null) {
           enableHS2PolicyProvider = true;
         }
       } catch (HiveAuthzPluginException e) {
@@ -95,12 +97,12 @@ public class GenericUDFRestrictInformationSchema extends GenericUDF {
             LOG.warn("Error instantiating hive.security.metastore.authorization.manager", e);
           }
         }
-      }
 
-      if (enableHS2PolicyProvider || enableMetastorePolicyProvider) {
-        enabled = new BooleanWritable(true);
-      } else {
-        enabled = new BooleanWritable(false);
+        if (enableHS2PolicyProvider || enableMetastorePolicyProvider) {
+          enabled = new BooleanWritable(true);
+        } else {
+          enabled = new BooleanWritable(false);
+        }
       }
     }