Posted to commits@hive.apache.org by ng...@apache.org on 2021/08/04 15:12:21 UTC

[hive] branch master updated: HIVE-24705: Authorization support for Storage-handler based tables (Saihemanth via Naveen Gangam)

This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 2431fd1  HIVE-24705: Authorization support for Storage-handler based tables (Saihemanth via Naveen Gangam)
2431fd1 is described below

commit 2431fd16aedbb857356cef3e15f20e91185e4039
Author: saihemanth <sa...@cloudera.com>
AuthorDate: Mon Feb 8 15:52:44 2021 -0700

    HIVE-24705: Authorization support for Storage-handler based tables (Saihemanth via Naveen Gangam)
---
 .../hadoop/hive/druid/DruidStorageHandler.java     | 22 +++++++++-
 .../hadoop/hive/hbase/HBaseStorageHandler.java     | 22 ++++++++++
 .../hive/storage/jdbc/JdbcStorageHandler.java      | 12 +++++-
 .../hadoop/hive/kafka/KafkaStorageHandler.java     | 19 +++++++-
 .../hive/ql/metadata/DefaultStorageHandler.java    | 16 ++++++-
 .../metadata/HiveStorageAuthorizationHandler.java  | 49 +++++++++++++++++++++
 .../org/apache/hadoop/hive/ql/metadata/Table.java  |  6 ++-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java     | 26 ++++++++---
 .../HiveCustomStorageHandlerUtils.java             | 37 ++++++++++++++++
 .../authorization/command/CommandAuthorizerV2.java | 50 +++++++++++++++++++---
 .../authorization/plugin/HivePrivilegeObject.java  |  3 +-
 .../plugin/metastore/events/AlterTableEvent.java   | 42 ++++++++++++++++++
 12 files changed, 286 insertions(+), 18 deletions(-)
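
In brief: the patch adds a HiveStorageAuthorizationHandler interface through which a
storage handler exposes a URI that authorization plugins (such as Apache Ranger) can
write policies against. The built-in Druid, HBase, JDBC and Kafka handlers implement
it; a handler that does not is covered by a generic fallback URI built from its class
name and sanitized table properties. CommandAuthorizerV2 and the metastore
AlterTableEvent then emit a STORAGEHANDLER_URI privilege object for CREATE TABLE,
CREATE TABLE AS SELECT and ALTER TABLE ... PROPERTIES on such tables. For example, a
table created with STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' would
additionally be authorized against a URI of the form (hypothetical values):

    hbase://zk1.example.com:2181/default.mytable/:key,cf:col1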

diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index f3c86d1..4a8c281 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -89,6 +89,7 @@ import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageAuthorizationHandler;
 import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
@@ -117,6 +118,8 @@ import javax.annotation.Nullable;
 import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -134,7 +137,7 @@ import static org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.JSON_MAPPER;
  * DruidStorageHandler provides a HiveStorageHandler implementation for Druid.
  */
 @SuppressWarnings({ "rawtypes" }) public class DruidStorageHandler extends DefaultHiveMetaHook
-    implements HiveStorageHandler {
+    implements HiveStorageHandler, HiveStorageAuthorizationHandler {
 
   private static final Logger LOG = LoggerFactory.getLogger(DruidStorageHandler.class);
 
@@ -149,6 +152,11 @@ import static org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.JSON_MAPPER;
   private static final List<String> ALLOWED_ALTER_TYPES =
       ImmutableList.of("ADDPROPS", "DROPPROPS", "ADDCOLS");
 
+  /** Druid prefix to form the URI for authentication */
+  private static final String DRUID_PREFIX = "druid:";
+  /** Druid config for determining the host name */
+  private static final String DRUID_HOST_NAME = "druid.zk.service.host";
+
   static {
     final Lifecycle lifecycle = new Lifecycle();
     try {
@@ -258,6 +266,18 @@ import static org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.JSON_MAPPER;
     this.commitInsertTable(table, false);
   }
 
+  @Override
+  public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException{
+    String host_name = conf.get(DRUID_HOST_NAME) != null ? conf.get(DRUID_HOST_NAME) :
+            HiveConf.getVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
+    String table_name = tableProperties.get(Constants.DRUID_DATA_SOURCE);
+    String column_names = tableProperties.get(Constants.DRUID_QUERY_FIELD_NAMES);
+    if (column_names != null)
+      return new URI(DRUID_PREFIX+"//"+host_name+"/"+table_name+"/"+column_names);
+    else
+      return new URI(DRUID_PREFIX+"//"+host_name+"/"+table_name);
+  }
+
   private void updateKafkaIngestion(Table table) {
     final String overlordAddress = HiveConf.getVar(getConf(), HiveConf.ConfVars.HIVE_DRUID_OVERLORD_DEFAULT_ADDRESS);
 
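
The Druid authorization URI thus has the form druid://<host>/<datasource>[/<field-names>],
with the host taken from druid.zk.service.host when set and from the broker default
address otherwise. A minimal sketch of the contract, with hypothetical values:

    // Given a configured DruidStorageHandler instance; values are illustrative only.
    Map<String, String> props = new HashMap<>();
    props.put(Constants.DRUID_DATA_SOURCE, "wikipedia");
    URI uri = druidStorageHandler.getURIForAuth(props);
    // with druid.zk.service.host=zk1.example.com in the conf:
    // uri.toString() -> "druid://zk1.example.com/wikipedia"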
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 1e772dd..787ccd4 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -19,6 +19,8 @@
 package org.apache.hadoop.hive.hbase;
 
 import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -102,6 +104,13 @@ public class HBaseStorageHandler extends DefaultStorageHandler
 
   final static public String DEFAULT_PREFIX = "default.";
 
+  /** HBase prefix to form the URI for authentication */
+  private static final String HBASE_PREFIX = "hbase:";
+  /** HBase config for determining the host name based on hbase-site.xml */
+  private static final String HBASE_HOST_NAME = "hbase.zookeeper.quorum";
+  /** HBase config for determining the client port based on hbase-site.xml */
+  private static final String HBASE_CLIENT_PORT = "hbase.zookeeper.property.clientPort";
+
   //Check if the configure job properties is called from input
   // or output for setting asymmetric properties
   private boolean configureInputJobProps = true;
@@ -278,6 +287,19 @@ public class HBaseStorageHandler extends DefaultStorageHandler
     } // output job properties
   }
 
+  @Override
+  public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException{
+    hbaseConf = getConf();
+    String hbase_host = tableProperties.containsKey(HBASE_HOST_NAME)? tableProperties.get(HBASE_HOST_NAME) : hbaseConf.get(HBASE_HOST_NAME);
+    String hbase_port = tableProperties.containsKey(HBASE_CLIENT_PORT)? tableProperties.get(HBASE_CLIENT_PORT) : hbaseConf.get(HBASE_CLIENT_PORT);
+    String table_name = tableProperties.getOrDefault(HBaseSerDe.HBASE_TABLE_NAME, null);
+    String column_family = tableProperties.getOrDefault(HBaseSerDe.HBASE_COLUMNS_MAPPING, null);
+    if (column_family != null)
+      return new URI(HBASE_PREFIX+"//"+hbase_host+":"+hbase_port+"/"+table_name+"/"+column_family);
+    else
+      return new URI(HBASE_PREFIX+"//"+hbase_host+":"+hbase_port+"/"+table_name);
+  }
+
   /**
    * Return true when HBaseStorageHandler should generate hfiles instead of operate against the
    * online table. This mode is implicitly applied when "hive.hbase.generatehfiles" is true.
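
The HBase URI is assembled as hbase://<zk-quorum>:<client-port>/<table>[/<column-mapping>],
preferring ZooKeeper coordinates from the table properties and falling back to the
handler's configuration (hbase-site.xml). A hypothetical example:

    // Values are illustrative only.
    Map<String, String> props = new HashMap<>();
    props.put(HBaseSerDe.HBASE_TABLE_NAME, "default.mytable");
    props.put(HBaseSerDe.HBASE_COLUMNS_MAPPING, ":key,cf:col1");
    // with hbase.zookeeper.quorum=zk1.example.com and clientPort=2181 in the conf:
    // getURIForAuth(props) -> "hbase://zk1.example.com:2181/default.mytable/:key,cf:col1"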
diff --git a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
index 2d00755..11154b9 100644
--- a/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
+++ b/jdbc-handler/src/main/java/org/apache/hive/storage/jdbc/JdbcStorageHandler.java
@@ -19,6 +19,7 @@ import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageAuthorizationHandler;
 import org.apache.hadoop.hive.ql.metadata.JarUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
@@ -37,8 +38,10 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.net.URI;
+import java.net.URISyntaxException;
 
-public class JdbcStorageHandler implements HiveStorageHandler {
+public class JdbcStorageHandler implements HiveStorageHandler, HiveStorageAuthorizationHandler {
 
   private static final Logger LOGGER = LoggerFactory.getLogger(JdbcStorageHandler.class);
   private Configuration conf;
@@ -93,6 +96,13 @@ public class JdbcStorageHandler implements HiveStorageHandler {
   }
 
   @Override
+  public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException{
+    String host_url = tableProperties.get(Constants.JDBC_URL);
+    String table_name = tableProperties.get(Constants.JDBC_TABLE);
+    return new URI(host_url+"/"+table_name);
+  }
+
+  @Override
   public void configureInputJobCredentials(TableDesc tableDesc, Map<String, String> jobSecrets) {
     try {
       LOGGER.debug("Adding secrets to input job conf");
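
For JDBC tables no extra scheme is prefixed: the URI is the configured connection URL
with the table name appended. A hypothetical example:

    // Values are illustrative only.
    Map<String, String> props = new HashMap<>();
    props.put(Constants.JDBC_URL, "jdbc:mysql://db.example.com:3306/sales");
    props.put(Constants.JDBC_TABLE, "orders");
    // getURIForAuth(props) -> "jdbc:mysql://db.example.com:3306/sales/orders"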
diff --git a/kafka-handler/src/java/org/apache/hadoop/hive/kafka/KafkaStorageHandler.java b/kafka-handler/src/java/org/apache/hadoop/hive/kafka/KafkaStorageHandler.java
index c3ddbb5..bf8c973 100644
--- a/kafka-handler/src/java/org/apache/hadoop/hive/kafka/KafkaStorageHandler.java
+++ b/kafka-handler/src/java/org/apache/hadoop/hive/kafka/KafkaStorageHandler.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageAuthorizationHandler;
 import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
@@ -53,6 +54,8 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -65,13 +68,16 @@ import java.util.function.Predicate;
 /**
  * Hive Kafka storage handler to allow user to read and write from/to Kafka message bus.
  */
-@SuppressWarnings("ALL") public class KafkaStorageHandler extends DefaultHiveMetaHook implements HiveStorageHandler {
+@SuppressWarnings("ALL") public class KafkaStorageHandler extends DefaultHiveMetaHook implements HiveStorageHandler, HiveStorageAuthorizationHandler {
 
   private static final Logger LOG = LoggerFactory.getLogger(KafkaStorageHandler.class);
   private static final String KAFKA_STORAGE_HANDLER = "org.apache.hadoop.hive.kafka.KafkaStorageHandler";
 
   private Configuration configuration;
 
+  /** Kafka prefix to form the URI for authentication */
+  private static final String KAFKA_PREFIX = "kafka:";
+
   @Override public Class<? extends InputFormat> getInputFormatClass() {
     return KafkaInputFormat.class;
   }
@@ -192,6 +198,17 @@ import java.util.function.Predicate;
     return new KafkaStorageHandlerInfo(topic, properties);
   }
 
+  @Override
+  public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException{
+    String host_name = tableProperties.get(KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName()) != null ?
+            tableProperties.get(KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName()) :
+            configuration.get(KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName());
+    Preconditions.checkNotNull(host_name, "Set Table property " + KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName());
+    String table_name = tableProperties.get(KafkaTableProperties.HIVE_KAFKA_TOPIC.getName());
+    Preconditions.checkNotNull(table_name, "Set Table property " + KafkaTableProperties.HIVE_KAFKA_TOPIC.getName());
+    return new URI(KAFKA_PREFIX+"//"+host_name+"/"+table_name);
+  }
+
   private Properties buildProducerProperties(Table table) {
     String brokers = table.getParameters().get(KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName());
     if (brokers == null || brokers.isEmpty()) {
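
The Kafka variant requires both the broker list and the topic, failing fast via
Preconditions if either is missing; the broker list may come from the table properties
or, failing that, from the job configuration. A hypothetical example:

    // Values are illustrative only.
    Map<String, String> props = new HashMap<>();
    props.put(KafkaTableProperties.HIVE_KAFKA_BOOTSTRAP_SERVERS.getName(), "broker1.example.com:9092");
    props.put(KafkaTableProperties.HIVE_KAFKA_TOPIC.getName(), "click_stream");
    // getURIForAuth(props) -> "kafka://broker1.example.com:9092/click_stream"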
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
index 5a7c3cb..4632ae2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java
@@ -18,11 +18,14 @@
 
 package org.apache.hadoop.hive.ql.metadata;
 
+import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.HiveMetaHook;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
+import org.apache.hadoop.hive.ql.security.authorization.HiveCustomStorageHandlerUtils;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
@@ -40,7 +43,7 @@ import org.apache.hadoop.mapred.SequenceFileOutputFormat;
  * (providing something which appears to be a non-native table with respect to
  * metadata even though its behavior is otherwise identical to a native table).
  */
-public class DefaultStorageHandler implements HiveStorageHandler {
+public class DefaultStorageHandler implements HiveStorageHandler, HiveStorageAuthorizationHandler {
   private Configuration conf;
 
   @Override
@@ -64,6 +67,17 @@ public class DefaultStorageHandler implements HiveStorageHandler {
     return null;
   }
 
+  @Override
+  public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException{
+    // custom storage URI by default
+    try {
+      return new URI(this.getClass().getSimpleName().toLowerCase() + "://" +
+              HiveCustomStorageHandlerUtils.getTablePropsForCustomStorageHandler(tableProperties));
+    } catch (Exception ex) {
+      throw new URISyntaxException(this.getClass().getSimpleName(), ex.getMessage());
+    }
+  }
+
   public HiveAuthorizationProvider getAuthorizationProvider()
          throws HiveException {
      return new DefaultHiveAuthorizationProvider();
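
This default gives every handler that extends DefaultStorageHandler a usable fallback:
the URI scheme is the lower-cased simple name of the (runtime) handler class and the
path is the slash-joined, sanitized table property values. For a hypothetical subclass
MyHandler with properties {"a.b": "x", "c.d": "y"} the result would be a URI like:

    myhandler://x/y/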
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageAuthorizationHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageAuthorizationHandler.java
new file mode 100644
index 0000000..c9e095e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageAuthorizationHandler.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.metadata;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience;
+import org.apache.hadoop.hive.common.classification.InterfaceStability;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+/**
+ * HiveStorageAuthorizationHandler defines a pluggable interface for
+ * authorization of storage based tables in Hive. A Storage authorization
+ * handler consists of a bundle of the following:
+ *
+ *<ul>
+ *<li>getURIForAuth
+ *</ul>
+ *
+ * Storage authorization handler classes are plugged in using the STORED BY 'classname'
+ * clause in CREATE TABLE.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Stable
+public interface HiveStorageAuthorizationHandler{
+
+    /**
+     * @return a URI, derived from the given table properties, that
+     * identifies the underlying storage for authorization.
+     */
+    public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException;
+}
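
A third-party handler can opt in by implementing the interface (or simply extending
DefaultStorageHandler, which now provides the fallback above). A minimal sketch; the
class name and property keys below are hypothetical:

    import java.net.URI;
    import java.net.URISyntaxException;
    import java.util.Map;
    import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;

    public class MyCloudStorageHandler extends DefaultStorageHandler {
      @Override
      public URI getURIForAuth(Map<String, String> tableProperties) throws URISyntaxException {
        // Expose the endpoint/table coordinates the authorizer should match policies on.
        return new URI("mycloud://" + tableProperties.get("mycloud.endpoint")
            + "/" + tableProperties.get("mycloud.table"));
      }
    }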
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 1c3c116..07b222d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -355,6 +355,10 @@ public class Table implements Serializable {
     return storageHandler;
   }
 
+  public void setStorageHandler(HiveStorageHandler sh){
+    storageHandler = sh;
+  }
+
   public StorageHandlerInfo getStorageHandlerInfo() {
     return storageHandlerInfo;
   }
@@ -471,7 +475,7 @@ public class Table implements Serializable {
   }
 
   public String getProperty(String name) {
-    return tTable.getParameters().get(name);
+    return tTable.getParameters() != null ? tTable.getParameters().get(name) : null;
   }
 
   public boolean isImmutable(){
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 800be4f..a33d0fe 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -301,6 +301,7 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.base.Splitter;
 import com.google.common.base.Strings;
@@ -13593,7 +13594,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary,
           isTransactional, isManaged, false);
       addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()},
-          TableType.MANAGED_TABLE, isTemporary, tblProps);
+          TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
 
       CreateTableDesc crtTblDesc = new CreateTableDesc(qualifiedTabName,
           isExt, isTemporary, cols, partCols,
@@ -13621,7 +13622,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       tblProps = validateAndAddDefaultProperties(tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization,
           isTemporary, isTransactional, isManaged, false);
       addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()},
-          TableType.MANAGED_TABLE, false, tblProps);
+          TableType.MANAGED_TABLE, false, tblProps, storageFormat);
 
       CreateTableDesc crtTranTblDesc =
           new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols, partCols, bucketCols, sortCols, numBuckets,
@@ -13645,7 +13646,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary,
           isTransactional, isManaged, false);
       addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()},
-          TableType.MANAGED_TABLE, isTemporary, tblProps);
+          TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
 
       Table likeTable = getTable(likeTableName, false);
       if (likeTable != null) {
@@ -13728,7 +13729,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           tblProps, isExt, storageFormat, dbDotTab, sortCols, isMaterialization, isTemporary,
           isTransactional, isManaged, true);
       addDbAndTabToOutputs(new String[] {qualifiedTabName.getDb(), qualifiedTabName.getTable()},
-          TableType.MANAGED_TABLE, isTemporary, tblProps);
+          TableType.MANAGED_TABLE, isTemporary, tblProps, storageFormat);
       tableDesc = new CreateTableDesc(qualifiedTabName, isExt, isTemporary, cols,
           partColNames, bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
           rowFormatParams.fieldEscape, rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim,
@@ -13752,7 +13753,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
   /** Adds entities for create table/create view. */
   private void addDbAndTabToOutputs(String[] qualifiedTabName, TableType type,
-      boolean isTemporary, Map<String, String> tblProps) throws SemanticException {
+      boolean isTemporary, Map<String, String> tblProps, StorageFormat storageFormat) throws SemanticException {
     Database database  = getDatabase(qualifiedTabName[0]);
     outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
 
@@ -13760,6 +13761,19 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     t.setParameters(tblProps);
     t.setTableType(type);
     t.setTemporary(isTemporary);
+    HiveStorageHandler storageHandler = null;
+    if (storageFormat.getStorageHandler() != null) {
+      try {
+        storageHandler = (HiveStorageHandler) ReflectionUtils.newInstance(
+                conf.getClassByName(storageFormat.getStorageHandler()), SessionState.get().getConf());
+      } catch (ClassNotFoundException ex) {
+        LOG.error("Class not found. Storage handler will be set to null: "+ex.getMessage() , ex);
+      }
+    }
+    t.setStorageHandler(storageHandler);
+    for(Map.Entry<String,String> serdeMap : storageFormat.getSerdeProps().entrySet()){
+      t.setSerdeParam(serdeMap.getKey(), serdeMap.getValue());
+    }
     outputs.add(new WriteEntity(t, WriteEntity.WriteType.DDL_NO_LOCK));
   }
 
@@ -13899,7 +13913,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         location, storageFormat.getSerde(), storageFormat.getStorageHandler(),
         storageFormat.getSerdeProps());
     addDbAndTabToOutputs(new String[] {qualTabName.getDb(), qualTabName.getTable()}, TableType.MATERIALIZED_VIEW,
-        false, tblProps);
+        false, tblProps, storageFormat);
     queryState.setCommandType(HiveOperation.CREATE_MATERIALIZED_VIEW);
     qb.setViewDesc(createVwDesc);
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveCustomStorageHandlerUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveCustomStorageHandlerUtils.java
new file mode 100644
index 0000000..e786569
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveCustomStorageHandlerUtils.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.Map;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.common.StatsSetupConst;
+
+public class HiveCustomStorageHandlerUtils {
+
+    public static String getTablePropsForCustomStorageHandler(Map<String, String> tableProperties) {
+        StringBuilder properties = new StringBuilder();
+        for (Map.Entry<String,String> serdeMap : tableProperties.entrySet()) {
+            if (!serdeMap.getKey().equalsIgnoreCase(serdeConstants.SERIALIZATION_FORMAT) &&
+                    !serdeMap.getKey().equalsIgnoreCase(StatsSetupConst.COLUMN_STATS_ACCURATE)) {
+                properties.append(serdeMap.getValue().replaceAll("\\s","").replaceAll("\"","")); // strip spaces and double quotes from values to avoid URI syntax exceptions
+                properties.append("/");
+            }
+        }
+        return properties.toString();
+    }
+}
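
The helper joins the property values (skipping serialization.format and
COLUMN_STATS_ACCURATE) with '/' separators, stripping whitespace and double quotes so
the result always parses as a URI path. A hypothetical example; note that with a plain
HashMap the segment order is unspecified:

    // Values are illustrative only; LinkedHashMap keeps the example deterministic.
    Map<String, String> props = new LinkedHashMap<>();
    props.put("my.endpoint", "host one");   // space is stripped
    props.put("my.table", "\"orders\"");    // double quotes are stripped
    // getTablePropsForCustomStorageHandler(props) -> "hostone/orders/"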
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
index c1ce53d..1c6b691 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
@@ -18,14 +18,16 @@
 
 package org.apache.hadoop.hive.ql.security.authorization.command;
 
+import java.lang.reflect.Method;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.HashMap;
 import java.util.Set;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
@@ -38,21 +40,28 @@ import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageAuthorizationHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
+import org.apache.hadoop.hive.ql.security.authorization.HiveCustomStorageHandlerUtils;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Command authorization, new type.
  */
 final class CommandAuthorizerV2 {
+  private static final Logger LOG = LoggerFactory.getLogger(CommandAuthorizerV2.class.getName());
   private CommandAuthorizerV2() {
     throw new UnsupportedOperationException("CommandAuthorizerV2 should not be instantiated");
   }
@@ -71,8 +80,8 @@ final class CommandAuthorizerV2 {
     List<WriteEntity> outputList = new ArrayList<WriteEntity>(outputs);
     addPermanentFunctionEntities(ss, inputList);
 
-    List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputList, selectTab2Cols);
-    List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputList, updateTab2Cols);
+    List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputList, selectTab2Cols, hiveOpType);
+    List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputList, updateTab2Cols, hiveOpType);
 
     HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
     authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
@@ -97,7 +106,7 @@ final class CommandAuthorizerV2 {
   }
 
   private static List<HivePrivilegeObject> getHivePrivObjects(List<? extends Entity> privObjects,
-      Map<String, List<String>> tableName2Cols) {
+      Map<String, List<String>> tableName2Cols, HiveOperationType hiveOpType) throws HiveException {
     List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
     if (privObjects == null){
       return hivePrivobjs;
@@ -138,7 +147,7 @@ final class CommandAuthorizerV2 {
         continue;
       }
 
-      addHivePrivObject(privObject, tableName2Cols, hivePrivobjs);
+      addHivePrivObject(privObject, tableName2Cols, hivePrivobjs, hiveOpType);
     }
     return hivePrivobjs;
   }
@@ -170,7 +179,7 @@ final class CommandAuthorizerV2 {
   }
 
   private static void addHivePrivObject(Entity privObject, Map<String, List<String>> tableName2Cols,
-      List<HivePrivilegeObject> hivePrivObjs) {
+      List<HivePrivilegeObject> hivePrivObjs, HiveOperationType hiveOpType) throws HiveException {
     HivePrivilegeObjectType privObjType = AuthorizationUtils.getHivePrivilegeObjectType(privObject.getType());
     HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
     HivePrivilegeObject hivePrivObject = null;
@@ -186,6 +195,34 @@ final class CommandAuthorizerV2 {
           tableName2Cols.get(Table.getCompleteName(table.getDbName(), table.getTableName()));
       hivePrivObject = new HivePrivilegeObject(privObjType, table.getDbName(), table.getTableName(),
           null, columns, actionType, null, null, table.getOwner(), table.getOwnerType());
+      if (table.getStorageHandler() != null) {
+        //TODO: add hive privilege object for storage based handlers for create and alter table commands.
+        if (hiveOpType == HiveOperationType.CREATETABLE ||
+                hiveOpType == HiveOperationType.ALTERTABLE_PROPERTIES ||
+                hiveOpType == HiveOperationType.CREATETABLE_AS_SELECT) {
+          String storageuri = null;
+          Map<String, String> tableProperties = new HashMap<>();
+          Configuration conf = new Configuration();
+          tableProperties.putAll(table.getSd().getSerdeInfo().getParameters());
+          tableProperties.putAll(table.getParameters());
+          try {
+            if (table.getStorageHandler() instanceof HiveStorageAuthorizationHandler) {
+              HiveStorageAuthorizationHandler authorizationHandler = (HiveStorageAuthorizationHandler) ReflectionUtils.newInstance(
+                      conf.getClassByName(table.getStorageHandler().getClass().getName()), SessionState.get().getConf());
+              storageuri = authorizationHandler.getURIForAuth(tableProperties).toString();
+            } else {
+              //Custom storage handler that does not implement HiveStorageAuthorizationHandler
+              storageuri = table.getStorageHandler().getClass().getSimpleName().toLowerCase() + "://" +
+                      HiveCustomStorageHandlerUtils.getTablePropsForCustomStorageHandler(tableProperties);
+            }
+          } catch(Exception ex) {
+            LOG.error("Exception occurred while getting the URI from storage handler: "+ex.getMessage(), ex);
+            throw new HiveException("Exception occurred while getting the URI from storage handler: "+ex.getMessage());
+          }
+          hivePrivObjs.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI, null, storageuri, null, null,
+                  actionType, null, table.getStorageHandler().getClass().getName(), table.getOwner(), table.getOwnerType()));
+        }
+      }
       break;
     case DFS_DIR:
     case LOCAL_DIR:
@@ -215,4 +252,5 @@ final class CommandAuthorizerV2 {
     }
     hivePrivObjs.add(hivePrivObject);
   }
+
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
index 0fc3af6..8c75610 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
@@ -112,7 +112,7 @@ public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
     // used to give service actions a name. This is used by kill query command so it can
     // be authorized specifically to a service if necessary.
     SERVICE_NAME,
-    SCHEDULED_QUERY,
+    SCHEDULED_QUERY, STORAGEHANDLER_URI
   }
 
   /**
@@ -280,6 +280,7 @@ public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
     case COLUMN:
     case LOCAL_URI:
     case DFS_URI:
+    case STORAGEHANDLER_URI:
       name = objectName;
       break;
     case COMMAND_PARAMS:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java
index b268fd7..5703086 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java
@@ -20,18 +20,30 @@
 package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
 
 import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.HiveCustomStorageHandlerUtils;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageAuthorizationHandler;
+import org.apache.hadoop.util.ReflectionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
+import java.util.HashMap;
 
 /*
  Authorizable Event for HiveMetaStore operation  AlterTableEvent
@@ -96,6 +108,36 @@ public class AlterTableEvent extends HiveMetaStoreAuthorizableEvent {
     }
 
     LOG.debug("<== AlterTableEvent.getOutputHObjs(): ret={}", ret);
+    if (newTable.getParameters().containsKey(hive_metastoreConstants.META_TABLE_STORAGE)) {
+      String storageUri = "";
+      DefaultStorageHandler defaultStorageHandler = null;
+      HiveStorageHandler hiveStorageHandler = null;
+      Configuration conf = new Configuration();
+      Map<String, String> tableProperties = new HashMap<>();
+      tableProperties.putAll(newTable.getSd().getSerdeInfo().getParameters());
+      tableProperties.putAll(newTable.getParameters());
+      try {
+        hiveStorageHandler = (HiveStorageHandler) ReflectionUtils.newInstance(
+                conf.getClassByName(newTable.getParameters().get(hive_metastoreConstants.META_TABLE_STORAGE)), event.getHandler().getConf());
+        if (hiveStorageHandler instanceof HiveStorageAuthorizationHandler) {
+          HiveStorageAuthorizationHandler authorizationHandler = (HiveStorageAuthorizationHandler) ReflectionUtils.newInstance(
+                  conf.getClassByName(newTable.getParameters().get(hive_metastoreConstants.META_TABLE_STORAGE)), event.getHandler().getConf());
+          storageUri = authorizationHandler.getURIForAuth(tableProperties).toString();
+        } else {
+          //Custom storage handler that does not implement getURIForAuth()
+          storageUri = hiveStorageHandler.getClass().getSimpleName().toLowerCase() + "://" +
+                  HiveCustomStorageHandlerUtils.getTablePropsForCustomStorageHandler(tableProperties);
+        }
+      } catch(Exception ex) {
+        LOG.error("Exception occurred while getting the URI from storage handler: "+ex.getMessage(), ex);
+      }
+      ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.STORAGEHANDLER_URI, null, storageUri, null, null,
+              HivePrivObjectActionType.OTHER, null, newTable.getParameters().get(hive_metastoreConstants.META_TABLE_STORAGE), newTable.getOwner(), newTable.getOwnerType()));
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AlterTableEvent.getOutputHObjs(): ret=" + ret);
+    }
 
     return ret;
   }