Posted to commits@hive.apache.org by pr...@apache.org on 2014/11/11 01:56:44 UTC

svn commit: r1638004 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/ metastore/src/java/org/apache/hadoop/hive/metastore/

Author: prasadm
Date: Tue Nov 11 00:56:43 2014
New Revision: 1638004

URL: http://svn.apache.org/r1638004
Log:
HIVE-8612: Support metadata result filter hooks (Prasad Mujumdar, reviewed by Brock Noland)

Added:
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
Modified:
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1638004&r1=1638003&r2=1638004&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Tue Nov 11 00:56:43 2014
@@ -556,6 +556,8 @@ public class HiveConf extends Configurat
     METASTORE_PART_INHERIT_TBL_PROPS("hive.metastore.partition.inherit.table.properties", "",
         "List of comma separated keys occurring in table properties which will get inherited to newly created partitions. \n" +
         "* implies all the keys will get inherited."),
+    METASTORE_FILTER_HOOK("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl",
+        "Metastore hook class for filtering the metadata read results"),
 
     // Parameters for exporting metadata on table drop (requires the use of the)
     // org.apache.hadoop.hive.ql.parse.MetaDataExportListener preevent listener

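For context, a minimal sketch of wiring up the new property from client code, mirroring what the test below does with ConfVars.METASTORE_FILTER_HOOK. The class name com.example.SampleFilterHook is hypothetical; any MetaStoreFilterHook implementation with a public (HiveConf) constructor can be plugged in, since the client instantiates the hook reflectively (see loadFilterHooks() in the HiveMetaStoreClient diff below).

    HiveConf conf = new HiveConf();
    // com.example.SampleFilterHook is a hypothetical MetaStoreFilterHook implementation.
    conf.setVar(HiveConf.ConfVars.METASTORE_FILTER_HOOK, "com.example.SampleFilterHook");
    // Metadata reads through this client, e.g. msc.getAllTables("default"),
    // now pass through the configured hook before results are returned.
    HiveMetaStoreClient msc = new HiveMetaStoreClient(conf, null);
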
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java?rev=1638004&view=auto
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java (added)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java Tue Nov 11 00:56:43 2014
@@ -0,0 +1,274 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionSpec;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Lists;
+
+public class TestFilterHooks {
+
+  public static class DummyMetaStoreFilterHookImpl extends DefaultMetaStoreFilterHookImpl {
+    public static boolean blockResults = false;
+
+    public DummyMetaStoreFilterHookImpl(HiveConf conf) {
+      super(conf);
+    }
+
+    @Override
+    public List<String> filterDatabases(List<String> dbList) {
+      if (blockResults) {
+        return new ArrayList<String>();
+      }
+      return super.filterDatabases(dbList);
+    }
+
+    @Override
+    public Database filterDatabase(Database dataBase) throws NoSuchObjectException {
+      if (blockResults) {
+        throw new NoSuchObjectException("Blocked access");
+      }
+      return super.filterDatabase(dataBase);
+    }
+
+    @Override
+    public List<String> filterTableNames(String dbName, List<String> tableList) {
+      if (blockResults) {
+        return new ArrayList<String>();
+      }
+      return super.filterTableNames(dbName, tableList);
+    }
+
+    @Override
+    public Table filterTable(Table table) throws NoSuchObjectException {
+      if (blockResults) {
+        throw new NoSuchObjectException("Blocked access");
+      }
+      return super.filterTable(table);
+    }
+
+    @Override
+    public List<Table> filterTables(List<Table> tableList) {
+      if (blockResults) {
+        return new ArrayList<Table>();
+      }
+      return super.filterTables(tableList);
+    }
+
+    @Override
+    public List<Partition> filterPartitions(List<Partition> partitionList) {
+      if (blockResults) {
+        return new ArrayList<Partition>();
+      }
+      return super.filterPartitions(partitionList);
+    }
+
+    @Override
+    public List<PartitionSpec> filterPartitionSpecs(
+        List<PartitionSpec> partitionSpecList) {
+      if (blockResults) {
+        return new ArrayList<PartitionSpec>();
+      }
+      return super.filterPartitionSpecs(partitionSpecList);
+    }
+
+    @Override
+    public Partition filterPartition(Partition partition) throws NoSuchObjectException {
+      if (blockResults) {
+        throw new NoSuchObjectException("Blocked access");
+      }
+      return super.filterPartition(partition);
+    }
+
+    @Override
+    public List<String> filterPartitionNames(String dbName, String tblName,
+        List<String> partitionNames) {
+      if (blockResults) {
+        return new ArrayList<String>();
+      }
+      return super.filterPartitionNames(dbName, tblName, partitionNames);
+    }
+
+    @Override
+    public Index filterIndex(Index index) throws NoSuchObjectException {
+      if (blockResults) {
+        throw new NoSuchObjectException("Blocked access");
+      }
+      return super.filterIndex(index);
+    }
+
+    @Override
+    public List<String> filterIndexNames(String dbName, String tblName,
+        List<String> indexList) {
+      if (blockResults) {
+        return new ArrayList<String>();
+      }
+      return super.filterIndexNames(dbName, tblName, indexList);
+    }
+
+    @Override
+    public List<Index> filterIndexes(List<Index> indexList) {
+      if (blockResults) {
+        return new ArrayList<Index>();
+      }
+      return super.filterIndexes(indexList);
+    }
+  }
+
+  private static final String DBNAME1 = "testdb1";
+  private static final String DBNAME2 = "testdb2";
+  private static final String TAB1 = "tab1";
+  private static final String TAB2 = "tab2";
+  private static final String INDEX1 = "idx1";
+  private HiveConf hiveConf;
+  private HiveMetaStoreClient msc;
+  private Driver driver;
+
+  @Before
+  public void setUp() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = false;
+    int port = MetaStoreUtils.findFreePort();
+    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());
+
+    hiveConf = new HiveConf(this.getClass());
+    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + port);
+    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    hiveConf.setVar(ConfVars.METASTORE_FILTER_HOOK, DummyMetaStoreFilterHookImpl.class.getName());
+    SessionState.start(new CliSessionState(hiveConf));
+    msc = new HiveMetaStoreClient(hiveConf, null);
+    driver = new Driver(hiveConf);
+
+    driver.run("drop database if exists " + DBNAME1  + " cascade");
+    driver.run("drop database if exists " + DBNAME2  + " cascade");
+    driver.run("create database " + DBNAME1);
+    driver.run("create database " + DBNAME2);
+    driver.run("use " + DBNAME1);
+    driver.run("create table " + DBNAME1 + "." + TAB1 + " (id int, name string)");
+    driver.run("create table " + TAB2 + " (id int) partitioned by (name string)");
+    driver.run("ALTER TABLE " + TAB2 + " ADD PARTITION (name='value1')");
+    driver.run("ALTER TABLE " + TAB2 + " ADD PARTITION (name='value2')");
+    driver.run("CREATE INDEX " + INDEX1 + " on table " + TAB1 + "(id) AS 'COMPACT' WITH DEFERRED REBUILD");
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = false;
+    driver.run("drop database if exists " + DBNAME1  + " cascade");
+    driver.run("drop database if exists " + DBNAME2  + " cascade");
+    driver.close();
+    driver.destroy();
+    msc.close();
+  }
+
+  @Test
+  public void testDefaultFilter() throws Exception {
+    assertNotNull(msc.getTable(DBNAME1, TAB1));
+    assertEquals(3, msc.getTables(DBNAME1, "*").size());
+    assertEquals(3, msc.getAllTables(DBNAME1).size());
+    assertEquals(1, msc.getTables(DBNAME1, TAB2).size());
+    assertEquals(0, msc.getAllTables(DBNAME2).size());
+
+    assertNotNull(msc.getDatabase(DBNAME1));
+    assertEquals(3, msc.getDatabases("*").size());
+    assertEquals(3, msc.getAllDatabases().size());
+    assertEquals(1, msc.getDatabases(DBNAME1).size());
+
+    assertNotNull(msc.getPartition(DBNAME1, TAB2, "name=value1"));
+    assertEquals(1, msc.getPartitionsByNames(DBNAME1, TAB2, Lists.newArrayList("name=value1")).size());
+
+    assertNotNull(msc.getIndex(DBNAME1, TAB1, INDEX1));
+  }
+
+  @Test
+  public void testDummyFilterForTables() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = true;
+    try {
+      msc.getTable(DBNAME1, TAB1);
+      fail("getTable() should fail in blocking mode");
+    } catch (NoSuchObjectException e) {
+      // Expected
+    }
+    assertEquals(0, msc.getTables(DBNAME1, "*").size());
+    assertEquals(0, msc.getAllTables(DBNAME1).size());
+    assertEquals(0, msc.getTables(DBNAME1, TAB2).size());
+  }
+
+  @Test
+  public void testDummyFilterForDb() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = true;
+    try {
+      assertNotNull(msc.getDatabase(DBNAME1));
+      fail("getDatabase() should fail in blocking mode");
+    } catch (NoSuchObjectException e) {
+      // Expected
+    }
+    assertEquals(0, msc.getDatabases("*").size());
+    assertEquals(0, msc.getAllDatabases().size());
+    assertEquals(0, msc.getDatabases(DBNAME1).size());
+  }
+
+  @Test
+  public void testDummyFilterForPartition() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = true;
+    try {
+      assertNotNull(msc.getPartition(DBNAME1, TAB2, "name=value1"));
+      fail("getPartition() should fail in blocking mode");
+    } catch (NoSuchObjectException e) {
+      // Expected
+    }
+    assertEquals(0, msc.getPartitionsByNames(DBNAME1, TAB2,
+        Lists.newArrayList("name=value1")).size());
+  }
+
+  @Test
+  public void testDummyFilterForIndex() throws Exception {
+    DummyMetaStoreFilterHookImpl.blockResults = true;
+    try {
+      assertNotNull(msc.getIndex(DBNAME1, TAB1, INDEX1));
+      fail("getIndex() should fail in blocking mode");
+    } catch (NoSuchObjectException e) {
+      // Expected
+    }
+  }
+
+}

Added: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java?rev=1638004&view=auto
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java (added)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java Tue Nov 11 00:56:43 2014
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionSpec;
+import org.apache.hadoop.hive.metastore.api.Table;
+
+/**
+ * Default no-op implementation of MetaStoreFilterHook that returns the results as-is.
+ */
+public class DefaultMetaStoreFilterHookImpl implements MetaStoreFilterHook {
+
+  public DefaultMetaStoreFilterHookImpl(HiveConf conf) {
+  }
+
+  @Override
+  public List<String> filterDatabases(List<String> dbList) {
+    return dbList;
+  }
+
+  @Override
+  public Database filterDatabase(Database dataBase) throws NoSuchObjectException {
+    return dataBase;
+  }
+
+  @Override
+  public List<String> filterTableNames(String dbName, List<String> tableList) {
+    return tableList;
+  }
+
+  @Override
+  public Table filterTable(Table table) throws NoSuchObjectException {
+    return table;
+  }
+
+  @Override
+  public List<Table> filterTables(List<Table> tableList) {
+    return tableList;
+  }
+
+  @Override
+  public List<Partition> filterPartitions(List<Partition> partitionList) {
+    return partitionList;
+  }
+
+  @Override
+  public List<PartitionSpec> filterPartitionSpecs(
+      List<PartitionSpec> partitionSpecList) {
+    return partitionSpecList;
+  }
+
+  @Override
+  public Partition filterPartition(Partition partition) throws NoSuchObjectException {
+    return partition;
+  }
+
+  @Override
+  public List<String> filterPartitionNames(String dbName, String tblName,
+      List<String> partitionNames) {
+    return partitionNames;
+  }
+
+  @Override
+  public Index filterIndex(Index index) throws NoSuchObjectException {
+    return index;
+  }
+
+  @Override
+  public List<String> filterIndexNames(String dbName, String tblName,
+      List<String> indexList) {
+    return indexList;
+  }
+
+  @Override
+  public List<Index> filterIndexes(List<Index> indexList) {
+    return indexList;
+  }
+}

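Because the default implementation passes everything through, a custom hook only needs to extend it and override the methods it cares about; the test's DummyMetaStoreFilterHookImpl above takes exactly this approach. A minimal sketch, assuming a hypothetical rule that hides tables with a "tmp_" name prefix (the class name and prefix rule are illustrative, not part of this commit):

    package com.example;

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;

    public class SampleFilterHook extends DefaultMetaStoreFilterHookImpl {

      // HiveMetaStoreClient loads hooks through a (HiveConf) constructor,
      // so subclasses must provide one.
      public SampleFilterHook(HiveConf conf) {
        super(conf);
      }

      // Hide tables whose names start with "tmp_"; everything else falls
      // through to the no-op parent implementation.
      @Override
      public List<String> filterTableNames(String dbName, List<String> tableList) {
        List<String> filtered = new ArrayList<String>(tableList.size());
        for (String name : tableList) {
          if (!name.startsWith("tmp_")) {
            filtered.add(name);
          }
        }
        return filtered;
      }
    }
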
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1638004&r1=1638003&r2=1638004&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Tue Nov 11 00:56:43 2014
@@ -22,6 +22,7 @@ import static org.apache.hadoop.hive.met
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isIndexTable;
 
 import java.io.IOException;
+import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationHandler;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
@@ -147,6 +148,7 @@ public class HiveMetaStoreClient impleme
   protected final HiveConf conf;
   private String tokenStrForm;
   private final boolean localMetaStore;
+  private final MetaStoreFilterHook filterHook;
 
   private Map<String, String> currentMetaVars;
 
@@ -169,6 +171,7 @@ public class HiveMetaStoreClient impleme
       conf = new HiveConf(HiveMetaStoreClient.class);
     }
     this.conf = conf;
+    filterHook = loadFilterHooks();
 
     String msUri = conf.getVar(HiveConf.ConfVars.METASTOREURIS);
     localMetaStore = HiveConfUtil.isEmbeddedMetaStore(msUri);
@@ -215,6 +218,31 @@ public class HiveMetaStoreClient impleme
     open();
   }
 
+  private MetaStoreFilterHook loadFilterHooks() throws IllegalStateException {
+    Class<? extends MetaStoreFilterHook> filterHookClass =
+        conf.getClass(HiveConf.ConfVars.METASTORE_FILTER_HOOK.varname,
+            DefaultMetaStoreFilterHookImpl.class,
+            MetaStoreFilterHook.class);
+    String msg = "Unable to create instance of " + filterHookClass.getName() + ": ";
+    try {
+      Constructor<? extends MetaStoreFilterHook> constructor =
+          filterHookClass.getConstructor(HiveConf.class);
+      return constructor.newInstance(conf);
+    } catch (NoSuchMethodException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    } catch (SecurityException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    } catch (InstantiationException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    } catch (IllegalAccessException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    } catch (IllegalArgumentException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    } catch (InvocationTargetException e) {
+      throw new IllegalStateException(msg + e.getMessage(), e);
+    }
+  }
+
   /**
    * Swaps the first element of the metastoreUris array with a random element from the
    * remainder of the array.
@@ -498,7 +526,7 @@ public class HiveMetaStoreClient impleme
         part.getDbName(), part.getTableName(), parts, ifNotExists);
     req.setNeedResult(needResults);
     AddPartitionsResult result = client.add_partitions_req(req);
-    return needResults ? result.getPartitions() : null;
+    return needResults ? filterHook.filterPartitions(result.getPartitions()) : null;
   }
 
   @Override
@@ -904,7 +932,7 @@ public class HiveMetaStoreClient impleme
   public List<String> getDatabases(String databasePattern)
     throws MetaException {
     try {
-      return client.get_databases(databasePattern);
+      return filterHook.filterDatabases(client.get_databases(databasePattern));
     } catch (Exception e) {
       MetaStoreUtils.logAndThrowMetaException(e);
     }
@@ -915,7 +943,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> getAllDatabases() throws MetaException {
     try {
-      return client.get_all_databases();
+      return filterHook.filterDatabases(client.get_all_databases());
     } catch (Exception e) {
       MetaStoreUtils.logAndThrowMetaException(e);
     }
@@ -934,29 +962,30 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<Partition> listPartitions(String db_name, String tbl_name,
       short max_parts) throws NoSuchObjectException, MetaException, TException {
-    return deepCopyPartitions(
-        client.get_partitions(db_name, tbl_name, max_parts));
+    return deepCopyPartitions(filterHook.filterPartitions(
+        client.get_partitions(db_name, tbl_name, max_parts)));
   }
 
   @Override
   public PartitionSpecProxy listPartitionSpecs(String dbName, String tableName, int maxParts) throws TException {
-    return PartitionSpecProxy.Factory.get(client.get_partitions_pspec(dbName, tableName, maxParts));
+    return PartitionSpecProxy.Factory.get(filterHook.filterPartitionSpecs(
+        client.get_partitions_pspec(dbName, tableName, maxParts)));
   }
 
   @Override
   public List<Partition> listPartitions(String db_name, String tbl_name,
       List<String> part_vals, short max_parts)
       throws NoSuchObjectException, MetaException, TException {
-    return deepCopyPartitions(
-        client.get_partitions_ps(db_name, tbl_name, part_vals, max_parts));
+    return deepCopyPartitions(filterHook.filterPartitions(
+        client.get_partitions_ps(db_name, tbl_name, part_vals, max_parts)));
   }
 
   @Override
   public List<Partition> listPartitionsWithAuthInfo(String db_name,
       String tbl_name, short max_parts, String user_name, List<String> group_names)
        throws NoSuchObjectException, MetaException, TException {
-    return deepCopyPartitions(
-        client.get_partitions_with_auth(db_name, tbl_name, max_parts, user_name, group_names));
+    return deepCopyPartitions(filterHook.filterPartitions(
+        client.get_partitions_with_auth(db_name, tbl_name, max_parts, user_name, group_names)));
   }
 
   @Override
@@ -964,8 +993,8 @@ public class HiveMetaStoreClient impleme
       String tbl_name, List<String> part_vals, short max_parts,
       String user_name, List<String> group_names) throws NoSuchObjectException,
       MetaException, TException {
-    return deepCopyPartitions(client.get_partitions_ps_with_auth(db_name,
-        tbl_name, part_vals, max_parts, user_name, group_names));
+    return deepCopyPartitions(filterHook.filterPartitions(client.get_partitions_ps_with_auth(db_name,
+        tbl_name, part_vals, max_parts, user_name, group_names)));
   }
 
   /**
@@ -986,16 +1015,16 @@ public class HiveMetaStoreClient impleme
   public List<Partition> listPartitionsByFilter(String db_name, String tbl_name,
       String filter, short max_parts) throws MetaException,
          NoSuchObjectException, TException {
-    return deepCopyPartitions(
-        client.get_partitions_by_filter(db_name, tbl_name, filter, max_parts));
+    return deepCopyPartitions(filterHook.filterPartitions(
+        client.get_partitions_by_filter(db_name, tbl_name, filter, max_parts)));
   }
 
   @Override
   public PartitionSpecProxy listPartitionSpecsByFilter(String db_name, String tbl_name,
                                                        String filter, int max_parts) throws MetaException,
          NoSuchObjectException, TException {
-    return PartitionSpecProxy.Factory.get(
-        client.get_part_specs_by_filter(db_name, tbl_name, filter, max_parts));
+    return PartitionSpecProxy.Factory.get(filterHook.filterPartitionSpecs(
+        client.get_part_specs_by_filter(db_name, tbl_name, filter, max_parts)));
   }
 
   @Override
@@ -1023,6 +1052,7 @@ public class HiveMetaStoreClient impleme
       throw new IncompatibleMetastoreException(
           "Metastore doesn't support listPartitionsByExpr: " + te.getMessage());
     }
+    r.setPartitions(filterHook.filterPartitions(r.getPartitions()));
     // TODO: in these methods, do we really need to deepcopy?
     deepCopyPartitions(r.getPartitions(), result);
     return !r.isSetHasUnknownPartitions() || r.isHasUnknownPartitions(); // Assume the worst.
@@ -1040,7 +1070,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public Database getDatabase(String name) throws NoSuchObjectException,
       MetaException, TException {
-    return deepCopy(client.get_database(name));
+    return deepCopy(filterHook.filterDatabase(client.get_database(name)));
   }
 
   /**
@@ -1056,13 +1086,15 @@ public class HiveMetaStoreClient impleme
   @Override
   public Partition getPartition(String db_name, String tbl_name,
       List<String> part_vals) throws NoSuchObjectException, MetaException, TException {
-    return deepCopy(client.get_partition(db_name, tbl_name, part_vals));
+    return deepCopy(filterHook.filterPartition(
+        client.get_partition(db_name, tbl_name, part_vals)));
   }
 
   @Override
   public List<Partition> getPartitionsByNames(String db_name, String tbl_name,
       List<String> part_names) throws NoSuchObjectException, MetaException, TException {
-    return deepCopyPartitions(client.get_partitions_by_names(db_name, tbl_name, part_names));
+    return deepCopyPartitions(filterHook.filterPartitions(
+        client.get_partitions_by_names(db_name, tbl_name, part_names)));
   }
 
   @Override
@@ -1070,8 +1102,8 @@ public class HiveMetaStoreClient impleme
       List<String> part_vals, String user_name, List<String> group_names)
       throws MetaException, UnknownTableException, NoSuchObjectException,
       TException {
-    return deepCopy(client.get_partition_with_auth(db_name, tbl_name, part_vals, user_name,
-        group_names));
+    return deepCopy(filterHook.filterPartition(client.get_partition_with_auth(db_name,
+        tbl_name, part_vals, user_name, group_names)));
   }
 
   /**
@@ -1088,7 +1120,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public Table getTable(String dbname, String name) throws MetaException,
       TException, NoSuchObjectException {
-    return deepCopy(client.get_table(dbname, name));
+    return deepCopy(filterHook.filterTable(client.get_table(dbname, name)));
   }
 
   /** {@inheritDoc} */
@@ -1096,21 +1128,23 @@ public class HiveMetaStoreClient impleme
   @Deprecated
   public Table getTable(String tableName) throws MetaException, TException,
       NoSuchObjectException {
-    return getTable(DEFAULT_DATABASE_NAME, tableName);
+    return filterHook.filterTable(getTable(DEFAULT_DATABASE_NAME, tableName));
   }
 
   /** {@inheritDoc} */
   @Override
   public List<Table> getTableObjectsByName(String dbName, List<String> tableNames)
       throws MetaException, InvalidOperationException, UnknownDBException, TException {
-    return deepCopyTables(client.get_table_objects_by_name(dbName, tableNames));
+    return deepCopyTables(filterHook.filterTables(
+        client.get_table_objects_by_name(dbName, tableNames)));
   }
 
   /** {@inheritDoc} */
   @Override
   public List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
       throws MetaException, TException, InvalidOperationException, UnknownDBException {
-    return client.get_table_names_by_filter(dbName, filter, maxTables);
+    return filterHook.filterTableNames(dbName,
+        client.get_table_names_by_filter(dbName, filter, maxTables));
   }
 
   /**
@@ -1129,7 +1163,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> getTables(String dbname, String tablePattern) throws MetaException {
     try {
-      return client.get_tables(dbname, tablePattern);
+      return filterHook.filterTableNames(dbname, client.get_tables(dbname, tablePattern));
     } catch (Exception e) {
       MetaStoreUtils.logAndThrowMetaException(e);
     }
@@ -1140,7 +1174,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> getAllTables(String dbname) throws MetaException {
     try {
-      return client.get_all_tables(dbname);
+      return filterHook.filterTableNames(dbname, client.get_all_tables(dbname));
     } catch (Exception e) {
       MetaStoreUtils.logAndThrowMetaException(e);
     }
@@ -1151,11 +1185,10 @@ public class HiveMetaStoreClient impleme
   public boolean tableExists(String databaseName, String tableName) throws MetaException,
       TException, UnknownDBException {
     try {
-      client.get_table(databaseName, tableName);
+      return filterHook.filterTable(client.get_table(databaseName, tableName)) != null;
     } catch (NoSuchObjectException e) {
       return false;
     }
-    return true;
   }
 
   /** {@inheritDoc} */
@@ -1169,14 +1202,16 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> listPartitionNames(String dbName, String tblName,
       short max) throws MetaException, TException {
-    return client.get_partition_names(dbName, tblName, max);
+    return filterHook.filterPartitionNames(dbName, tblName, 
+        client.get_partition_names(dbName, tblName, max));
   }
 
   @Override
   public List<String> listPartitionNames(String db_name, String tbl_name,
       List<String> part_vals, short max_parts)
       throws MetaException, TException, NoSuchObjectException {
-    return client.get_partition_names_ps(db_name, tbl_name, part_vals, max_parts);
+    return filterHook.filterPartitionNames(db_name, tbl_name, 
+        client.get_partition_names_ps(db_name, tbl_name, part_vals, max_parts));
   }
 
   @Override
@@ -1259,7 +1294,7 @@ public class HiveMetaStoreClient impleme
   public Index getIndex(String dbName, String tblName, String indexName)
       throws MetaException, UnknownTableException, NoSuchObjectException,
       TException {
-    return deepCopy(client.get_index_by_name(dbName, tblName, indexName));
+    return deepCopy(filterHook.filterIndex(client.get_index_by_name(dbName, tblName, indexName)));
   }
 
   /**
@@ -1275,7 +1310,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<String> listIndexNames(String dbName, String tblName, short max)
       throws MetaException, TException {
-    return client.get_index_names(dbName, tblName, max);
+    return filterHook.filterIndexNames(dbName, tblName, client.get_index_names(dbName, tblName, max));
   }
 
   /**
@@ -1291,7 +1326,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public List<Index> listIndexes(String dbName, String tblName, short max)
       throws NoSuchObjectException, MetaException, TException {
-    return client.get_indexes(dbName, tblName, max);
+    return filterHook.filterIndexes(client.get_indexes(dbName, tblName, max));
   }
 
   /** {@inheritDoc} */
@@ -1380,7 +1415,7 @@ public class HiveMetaStoreClient impleme
   @Override
   public Partition getPartition(String db, String tableName, String partName)
       throws MetaException, TException, UnknownTableException, NoSuchObjectException {
-    return deepCopy(client.get_partition_by_name(db, tableName, partName));
+    return deepCopy(filterHook.filterPartition(client.get_partition_by_name(db, tableName, partName)));
   }
 
   public Partition appendPartitionByName(String dbName, String tableName, String partName)

Added: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java?rev=1638004&view=auto
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java (added)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java Tue Nov 11 00:56:43 2014
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Index;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PartitionSpec;
+import org.apache.hadoop.hive.metastore.api.Table;
+
+/**
+ * Metadata filter hook for the metastore client. This is useful for authorization
+ * plugins on HiveServer2 to filter metadata results, especially in non-impersonation
+ * mode, where the metastore doesn't know the end user's identity.
+ */
+@LimitedPrivate(value = { "Apache Sentry (Incubating)" })
+@Evolving
+public interface MetaStoreFilterHook {
+
+  /**
+   * Filter given list of databases
+   * @param dbList
+   * @return List of filtered database names
+   */
+  public List<String> filterDatabases(List<String> dbList);
+
+  /**
+   * Filter given database object if applicable
+   * @param dataBase
+   * @return the same database if it's not filtered out
+   * @throws NoSuchObjectException
+   */
+  public Database filterDatabase(Database dataBase) throws NoSuchObjectException;
+
+  /**
+   * Filter given list of tables
+   * @param dbName
+   * @param tableList
+   * @return List of filtered table names
+   */
+  public List<String> filterTableNames(String dbName, List<String> tableList);
+
+  /**
+   * Filter given table object if applicable
+   * @param table
+   * @return the same table if it's not filtered out
+   * @throws NoSuchObjectException
+   */
+  public Table filterTable(Table table) throws NoSuchObjectException;
+
+  /**
+   * Filter given list of table objects
+   * @param tableList
+   * @return List of filtered table objects
+   */
+  public List<Table> filterTables(List<Table> tableList);
+
+  /**
+   * Filter given list of partitions
+   * @param partitionList
+   * @return List of filtered partition objects
+   */
+  public List<Partition> filterPartitions(List<Partition> partitionList);
+
+  /**
+   * Filter given list of partition specs
+   * @param partitionSpecList
+   * @return List of filtered partition specs
+   */
+  public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList);
+
+  /**
+   * Filter given partition object if applicable
+   * @param partition
+   * @return the same partition object if it's not filtered out
+   * @throws NoSuchObjectException
+   */
+  public Partition filterPartition(Partition partition) throws NoSuchObjectException;
+
+  /**
+   * Filter given list of partition names
+   * @param dbName
+   * @param tblName
+   * @param partitionNames
+   * @return List of filtered partition names
+   */
+  public List<String> filterPartitionNames(String dbName, String tblName,
+      List<String> partitionNames);
+
+  /**
+   * Filter given index object if applicable
+   * @param index
+   * @return the same index object if it's not filtered out
+   * @throws NoSuchObjectException
+   */
+  public Index filterIndex(Index index) throws NoSuchObjectException;
+
+  /**
+   * Filter given list of index names
+   * @param dbName
+   * @param tblName
+   * @param indexList
+   * @return List of filtered index names
+   */
+  public List<String> filterIndexNames(String dbName, String tblName,
+      List<String> indexList);
+
+  /**
+   * Filter given list of index objects
+   * @param indexList
+   * @return List of filtered index objects
+   */
+  public List<Index> filterIndexes(List<Index> indexList);
+}
+
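
A note on the contract: the list-returning methods express filtering by dropping entries, while the single-object methods (filterDatabase, filterTable, filterPartition, filterIndex) express it by throwing NoSuchObjectException, so a hidden object is indistinguishable from a missing one. A sketch of that pattern, where allowed() stands in for a hypothetical policy check supplied by the plugin:

    @Override
    public Database filterDatabase(Database dataBase) throws NoSuchObjectException {
      // allowed() is a placeholder for whatever policy the plugin enforces.
      if (!allowed(dataBase.getName())) {
        // Hidden objects surface to callers as "not found".
        throw new NoSuchObjectException(dataBase.getName() + " does not exist");
      }
      return dataBase;
    }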