Posted to commits@sentry.apache.org by sp...@apache.org on 2018/05/31 03:32:43 UTC

[65/86] sentry git commit: SENTRY-2208: Refactor out Sentry service into own module from sentry-provider-db (Anthony Young-Garner, reviewed by Sergio Pena, Steve Moist, Na Li)

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java b/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java
deleted file mode 100644
index 589acbe..0000000
--- a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java
+++ /dev/null
@@ -1,346 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.service.thrift;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.thrift.TException;
-import org.junit.Assert;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class TestFullUpdateInitializer {
-
-  private static Configuration conf = new Configuration();
-
-  static {
-    conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig
-            .SENTRY_HDFS_SYNC_METASTORE_CACHE_MAX_PART_PER_RPC, 1);
-    conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig
-            .SENTRY_HDFS_SYNC_METASTORE_CACHE_MAX_TABLES_PER_RPC, 1);
-    conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig
-            .SENTRY_HDFS_SYNC_METASTORE_CACHE_INIT_THREADS, 8);
-  }
-
-  /**
-   * Representation of a Hive table. A table has a name and a list of partitions.
-   */
-  private static class HiveTable {
-    String name;
-    List<String> partitions;
-
-    HiveTable(String name) {
-      this.name = name;
-      this.partitions = new ArrayList<>();
-    }
-
-    HiveTable(String name, List<String> partitions) {
-      this.name = name;
-      this.partitions = partitions;
-      if (this.partitions == null) {
-        this.partitions = new ArrayList<>();
-      }
-    }
-
-    HiveTable add(String partition) {
-      partitions.add(partition);
-      return this;
-    }
-  }
-
-  /**
-   * Representation of a Hive database. A database has a name and a list of tables
-   */
-  private static class HiveDb {
-    String name;
-    Collection<HiveTable> tables;
-
-    HiveDb(String name) {
-      this.name = name;
-      tables = new ArrayList<>();
-    }
-
-    HiveDb(String name, Collection<HiveTable> tables) {
-      this.name = name;
-      this.tables = tables;
-      if (this.tables == null) {
-        this.tables = new ArrayList<>();
-      }
-    }
-
-    void add(HiveTable table) {
-      this.tables.add(table);
-    }
-  }
-
-  /**
-   * Representation of a full Hive snapshot. A snapshot is a collection of databases.
-   */
-  private static class HiveSnapshot {
-    List<HiveDb> databases = new ArrayList<>();
-
-    HiveSnapshot() {
-    }
-
-    HiveSnapshot(Collection<HiveDb> dblist) {
-      if (dblist != null) {
-        databases.addAll(dblist);
-      }
-    }
-
-    HiveSnapshot add(HiveDb db) {
-      this.databases.add(db);
-      return this;
-    }
-  }
-
-  /**
-   * Convert Hive snapshot to mock client that will return proper values
-   * for the snapshot.
-   */
-  private static class MockClient {
-    HiveMetaStoreClient client;
-
-    MockClient(HiveSnapshot snapshot) throws TException {
-      client = Mockito.mock(HiveMetaStoreClient.class);
-      List<String> dbNames = new ArrayList<>(snapshot.databases.size());
-      // Walk over all databases and mock appropriate objects
-      for (HiveDb mdb: snapshot.databases) {
-        String dbName = mdb.name;
-        dbNames.add(dbName);
-        Database db = makeDb(dbName);
-        Mockito.when(client.getDatabase(dbName)).thenReturn(db);
-        List<String> tableNames = new ArrayList<>(mdb.tables.size());
-        // Walk over all tables for the database and mock appropriate objects
-        for (HiveTable table: mdb.tables) {
-          String tableName = table.name;
-          tableNames.add(tableName);
-          Table mockTable = makeTable(dbName, tableName);
-          Mockito.when(client.getTableObjectsByName(dbName,
-                  Lists.newArrayList(tableName)))
-                  .thenReturn(Lists.newArrayList(mockTable));
-          Mockito.when(client.listPartitionNames(dbName, tableName, (short) -1))
-                  .thenReturn(table.partitions);
-          // Walk across all partitions and mock appropriate objects
-          for (String partName: table.partitions) {
-            Partition p = makePartition(dbName, tableName, partName);
-            Mockito.when(client.getPartitionsByNames(dbName, tableName,
-                    Lists.<String>newArrayList(partName)))
-                    .thenReturn(Lists.<Partition>newArrayList(p));
-          }
-        }
-        Mockito.when(client.getAllTables(dbName)).thenReturn(tableNames);
-      }
-      // Return all database names
-      Mockito.when(client.getAllDatabases()).thenReturn(dbNames);
-    }
-  }
-
-  private static class MockHMSClientFactory implements HiveConnectionFactory {
-
-    private final HiveMetaStoreClient mClient;
-
-    private MockHMSClientFactory(MockClient mClient) {
-      this.mClient = mClient.client;
-    }
-
-    private MockHMSClientFactory(HiveMetaStoreClient client) {
-      this.mClient = client;
-    }
-
-    @Override
-    public HMSClient connect() throws IOException, InterruptedException, MetaException {
-      return new HMSClient(mClient);
-    }
-
-    @Override
-    public void close() throws Exception {
-    }
-  }
-
-  /**
-   * Create mock database with the given name
-   * @param name Database name
-   * @return Mock database object
-   */
-  private static Database makeDb(String name) {
-    Database db = Mockito.mock(Database.class);
-    Mockito.when(db.getName()).thenReturn(name);
-    Mockito.when(db.getLocationUri()).thenReturn("hdfs:///" + name);
-    return db;
-  }
-
-  /**
-   * Create mock table
-   * @param dbName db for this table
-   * @param tableName name of the table
-   * @return mock table object
-   */
-  private static Table makeTable(String dbName, String tableName) {
-    Table table = Mockito.mock(Table.class);
-    Mockito.when(table.getDbName()).thenReturn(dbName);
-    Mockito.when(table.getTableName()).thenReturn(tableName);
-    StorageDescriptor sd = Mockito.mock(StorageDescriptor.class);
-    Mockito.when(sd.getLocation()).thenReturn(
-            String.format("hdfs:///%s/%s", dbName, tableName));
-    Mockito.when(table.getSd()).thenReturn(sd);
-    return table;
-  }
-
-  /**
-   * Create mock partition
-   * @param dbName database for this partition
-   * @param tableName table for this partition
-   * @param partName partition name
-   * @return mock partition object
-   */
-  private static Partition makePartition(String dbName, String tableName, String partName) {
-    Partition partition = Mockito.mock(Partition.class);
-    StorageDescriptor sd = Mockito.mock(StorageDescriptor.class);
-    Mockito.when(sd.getLocation()).thenReturn(
-            String.format("hdfs:///%s/%s/%s", dbName, tableName, partName));
-    Mockito.when(partition.getSd()).thenReturn(sd);
-    return partition;
-  }
-
-  @Test
-  // Test basic operation with a small database
-  public void testSimple() throws Exception {
-    HiveTable tab21 = new HiveTable("tab21");
-    HiveTable tab31 = new HiveTable("tab31").add("part311").add("part312");
-    HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31));
-    HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21));
-    HiveDb db1 = new HiveDb("db1");
-    HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3);
-    MockClient c = new MockClient(snap);
-
-    Map<String, Collection<String>> update;
-    try(FullUpdateInitializer cacheInitializer =
-                new FullUpdateInitializer(new MockHMSClientFactory(c), conf)) {
-      update = cacheInitializer.getFullHMSSnapshot();
-    }
-    Assert.assertEquals(5, update.size());
-    Assert.assertEquals(Sets.newHashSet("db1"), update.get("db1"));
-    Assert.assertEquals(Sets.newHashSet("db2"), update.get("db2"));
-    Assert.assertEquals(Sets.newHashSet("db3"), update.get("db3"));
-    Assert.assertEquals(Sets.newHashSet("db2/tab21"), update.get("db2.tab21"));
-    Assert.assertEquals(Sets.newHashSet("db3/tab31",
-            "db3/tab31/part311", "db3/tab31/part312"), update.get("db3.tab31"));
-  }
-
-  @Test
-  // Test that invalid paths are handled correctly
-  public void testInvalidPaths() throws Exception {
-    //Set up mocks: db1.tb1, with tb1 returning a wrong dbname (db2)
-    Database db1 = makeDb("db1");
-
-    Table tab1 = Mockito.mock(Table.class);
-    //Return a wrong db name, so that this triggers an exception
-    Mockito.when(tab1.getDbName()).thenReturn("db2");
-    Mockito.when(tab1.getTableName()).thenReturn("tab1");
-
-    HiveMetaStoreClient client = Mockito.mock(HiveMetaStoreClient.class);
-    Mockito.when(client.getAllDatabases()).thenReturn(Lists.newArrayList("db1"));
-    Mockito.when(client.getDatabase("db1")).thenReturn(db1);
-
-    Table tab12 = Mockito.mock(Table.class);
-    Mockito.when(tab12.getDbName()).thenReturn("db1");
-    Mockito.when(tab12.getTableName()).thenReturn("tab21");
-    StorageDescriptor sd21 = Mockito.mock(StorageDescriptor.class);
-    Mockito.when(sd21.getLocation()).thenReturn("hdfs:///db1/tab21");
-    Mockito.when(tab12.getSd()).thenReturn(sd21);
-
-    Mockito.when(client.getTableObjectsByName("db1",
-            Lists.newArrayList("tab1"))).thenReturn(Lists.newArrayList(tab1));
-    Mockito.when(client.getTableObjectsByName("db1",
-            Lists.newArrayList("tab12"))).thenReturn(Lists.newArrayList(tab12));
-    Mockito.when(client.getAllTables("db1")).
-            thenReturn(Lists.newArrayList("tab1", "tab12"));
-
-
-    Map<String, Collection<String>> update;
-    try(FullUpdateInitializer cacheInitializer =
-                new FullUpdateInitializer(new MockHMSClientFactory(client), conf)) {
-      update = cacheInitializer.getFullHMSSnapshot();
-    }
-    Assert.assertEquals(2, update.size());
-    Assert.assertEquals(Sets.newHashSet("db1"), update.get("db1"));
-    Assert.assertEquals(Sets.newHashSet("db1/tab21"), update.get("db1.tab21"));
-  }
-
-  @Test
-  // Test handling of big tables and partitions
-  public void testBig() throws Exception {
-    int ndbs = 3;
-    int ntables = 51;
-    int nparts = 131;
-
-    HiveSnapshot snap = new HiveSnapshot();
-
-    for (int i = 0; i < ndbs; i++) {
-      HiveDb db = new HiveDb("db" + i);
-      for (int j = 0; j < ntables; j++) {
-        HiveTable table = new HiveTable("table" + i + j);
-        for (int k = 0; k < nparts; k++) {
-          table.add("part" + i + j + k);
-        }
-        db.add(table);
-      }
-      snap.add(db);
-    }
-    MockClient c = new MockClient(snap);
-    Map<String, Collection<String>> update;
-    try(FullUpdateInitializer cacheInitializer =
-                new FullUpdateInitializer(new MockHMSClientFactory(c), conf)) {
-      update = cacheInitializer.getFullHMSSnapshot();
-    }
-    Assert.assertEquals((ntables * ndbs) + ndbs, update.size());
-    for (int i = 0; i < ndbs; i++) {
-      String dbName = "db" + i;
-      Assert.assertEquals(Sets.newHashSet(dbName), update.get(dbName));
-
-      for (int j = 0; j < ntables; j++) {
-        String tableName = "table" + i + j;
-        Set<String> values = new HashSet<>();
-        values.add(String.format("%s/%s", dbName, tableName));
-        for (int k = 0; k < nparts; k++) {
-          String partName = "part" + i + j + k;
-          values.add(String.format("%s/%s/%s", dbName, tableName, partName));
-        }
-        String authz = dbName + "." + tableName;
-        Assert.assertEquals(values, update.get(authz));
-      }
-    }
-  }
-
-}
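
For context, FullUpdateInitializer.getFullHMSSnapshot() in the removed test above returns a
map keyed by authorizable name ("db" or "db.table"), with the HDFS path fragments covered by
that object as values. Below is a minimal, illustrative sketch of the map shape asserted in
testSimple; the class and builder code are invented for illustration, and only the keys and
path values come from the test.

import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

public class SnapshotShapeSketch {
  public static void main(String[] args) {
    // Authorizable name -> HDFS path fragments, mirroring the testSimple assertions.
    Map<String, Collection<String>> update = new HashMap<>();
    update.put("db1", Collections.singleton("db1"));
    update.put("db2", Collections.singleton("db2"));
    update.put("db3", Collections.singleton("db3"));
    update.put("db2.tab21", Collections.singleton("db2/tab21"));
    update.put("db3.tab31", new HashSet<>(Arrays.asList(
        "db3/tab31", "db3/tab31/part311", "db3/tab31/part312")));
    System.out.println(update);
  }
}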

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java b/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java
deleted file mode 100644
index c6be80d..0000000
--- a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java
+++ /dev/null
@@ -1,482 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.service.thrift;
-
-import org.apache.hadoop.hive.metastore.api.NotificationEvent;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAddPartitionMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAlterPartitionMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAlterTableMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONCreateDatabaseMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONCreateTableMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropDatabaseMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropPartitionMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropTableMessage;
-import org.apache.sentry.binding.metastore.messaging.json.SentryJSONMessageDeserializer;
-import org.junit.Test;
-import org.mockito.Mockito;
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType.*;
-import static org.junit.Assert.*;
-
-public class TestFullUpdateModifier {
-  private static final String SERVER = "s";
-  private static final String PRINCIPAL = "p";
-  private static final String DB = "Db1";
-  private static final String TABLE = "Tab1";
-  private static final String AUTH = DB.toLowerCase() + "." + TABLE.toLowerCase();
-  private static final String PATH = "foo/bar";
-  private static final String LOCATION = uri(PATH);
-
-  private static final Table TABLE_OBJ = new Table(TABLE, DB, "", 0, 0, 0,
-      buildStorageDescriptor(LOCATION), null, null, "", "", "");
-
-  /**
-   * Convert path to HDFS URI
-   */
-  private static final String uri(String path) {
-    return "hdfs:///" + path;
-  }
-
-  /**
-   * Creates a StorageDescriptor using the location as parameter.
-   *
-   * @param location The location string for the StorageDescriptor
-   * @return A StorageDescriptor object
-   */
-  private static StorageDescriptor buildStorageDescriptor(String location) {
-    return new StorageDescriptor(null, location, "", "", false, 0, null, null, null, null);
-  }
-
-  /**
-   * Creates a Table object using the db name, table name and table location as parameters.
-   *
-   * @param dbName The database name string.
-   * @param tableName The table name string.
-   * @param location The table location string.
-   * @return A Table object
-   */
-  private static Table buildTable(String dbName, String tableName, String location) {
-    return new Table(tableName, dbName, "", 0, 0, 0,
-        buildStorageDescriptor(location), null, null, "", "", "");
-  }
-
-  /**
-   * Test create database event. It should add database and its location.
-   * As a result we should have entry {"db1": {foo/bar}}
-   * @throws Exception
-   */
-  @Test
-  public void testCreateDatabase() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    NotificationEvent event = new NotificationEvent(0, 0, CREATE_DATABASE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONCreateDatabaseMessage message =
-            new SentryJSONCreateDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION);
-    Mockito.when(deserializer.getCreateDatabaseMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>();
-    expected.put(DB.toLowerCase(), Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test drop database event. It should drop database record.
-   * @throws Exception
-   */
-  @Test
-  public void testDropDatabase() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropDatabaseMessage message =
-            new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION);
-    Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    assertTrue(update.isEmpty());
-  }
-
-  /**
-   * Test drop database event when dropped database location doesn't
-   * match original database location. Should leave update intact.
-   * @throws Exception
-   */
-  @Test
-  public void testDropDatabaseWrongLocation() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropDatabaseMessage message =
-            new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L,
-                    "hdfs:///bad/location");
-    Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    // DB should stay
-    Map<String, Set<String>> expected = new HashMap<>();
-    expected.put(DB.toLowerCase(), Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test drop database which has tables/partitions.
-   * Should drop all related database records but leave unrelated records in place.
-   * @throws Exception
-   */
-  @Test
-  public void testDropDatabaseWithTables() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    update.put(AUTH, Collections.singleton(PATH));
-    update.put("unrelated", Collections.singleton(PATH));
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropDatabaseMessage message =
-            new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION);
-    Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>();
-    expected.put("unrelated", Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test create table event. It should add table and its location.
-   * As a result we should have entry {"db1.tab1": {foo/bar}}
-   * @throws Exception
-   */
-  @Test
-  public void testCreateTable() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    NotificationEvent event = new NotificationEvent(0, 0, CREATE_TABLE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONCreateTableMessage message =
-            new SentryJSONCreateTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, Collections.emptyIterator(), 0L);
-    Mockito.when(deserializer.getCreateTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>();
-    expected.put(AUTH, Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test drop table event. It should drop table record.
-   * @throws Exception
-   */
-  @Test
-  public void testDropTable() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(AUTH, Collections.singleton(PATH));
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_TABLE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropTableMessage message =
-            new SentryJSONDropTableMessage(SERVER, PRINCIPAL, DB, TABLE, 0L, LOCATION);
-    Mockito.when(deserializer.getDropTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    assertTrue(update.isEmpty());
-  }
-
-  /**
-   * Test drop table event when the dropped table location doesn't match the original
-   * table location. Should leave the table record intact.
-   * @throws Exception
-   */
-  @Test
-  public void testDropTableWrongLocation() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(AUTH, Collections.singleton(PATH));
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_TABLE.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropTableMessage message =
-            new SentryJSONDropTableMessage(SERVER, PRINCIPAL, DB, TABLE, 0L,
-                    "hdfs:///bad/location");
-    Mockito.when(deserializer.getDropTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    // Table entry should stay
-    assertEquals(Collections.singleton(PATH), update.get(AUTH));
-    assertEquals(1, update.size());
-  }
-
-  /**
-   * Test add partition event. It should add the partition location to the table locations.
-   * As a result we should have entry {"db1.tab1": {foo/bar, hello/world}}
-   * @throws Exception
-   */
-  @Test
-  public void testAddPartition() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    Set<String> locations = new HashSet<>();
-    locations.add(PATH);
-    update.put(AUTH, locations);
-
-    NotificationEvent event = new NotificationEvent(0, 0, ADD_PARTITION.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    String partPath = "hello/world";
-    String partLocation = uri(partPath);
-
-    SentryJSONAddPartitionMessage message =
-            new SentryJSONAddPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ,
-                Collections.emptyIterator(), Collections.emptyIterator(),
-                0L, Collections.singletonList(partLocation));
-    Mockito.when(deserializer.getAddPartitionMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Set<String> expected = new HashSet<>(2);
-    expected.add(PATH);
-    expected.add(partPath);
-    assertEquals(expected, update.get(AUTH));
-  }
-
-  /**
-   * Test drop partition event. It should drop partition info from the list of locations.
-   * @throws Exception
-   */
-  @Test
-  public void testDropPartitions() throws Exception {
-    String partPath = "hello/world";
-    String partLocation = uri(partPath);
-    Map<String, Collection<String>> update = new HashMap<>();
-    Set<String> locations = new HashSet<>();
-    locations.add(PATH);
-    locations.add(partPath);
-    update.put(AUTH, locations);
-
-    NotificationEvent event = new NotificationEvent(0, 0, DROP_PARTITION.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONDropPartitionMessage message =
-            new SentryJSONDropPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ,
-                    Collections.<Map<String,String>>emptyList(), 0L, Collections.singletonList(partLocation));
-    Mockito.when(deserializer.getDropPartitionMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    assertEquals(Collections.singleton(PATH), update.get(AUTH));
-  }
-
-  /**
-   * Test alter partition event. It should change partition location
-   * @throws Exception
-   */
-  @Test
-  public void testAlterPartition() throws Exception {
-    String partPath = "hello/world";
-    String partLocation = uri(partPath);
-
-    String newPath = "better/world";
-    String newLocation = uri(newPath);
-
-    Map<String, Collection<String>> update = new HashMap<>();
-    Set<String> locations = new HashSet<>();
-    locations.add(PATH);
-    locations.add(partPath);
-    update.put(AUTH, locations);
-
-    NotificationEvent event = new NotificationEvent(0, 0, ALTER_PARTITION.toString(), "");
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    Partition partitionObjBefore = new Partition(null, DB, TABLE, 0, 0, buildStorageDescriptor(partLocation), null);
-    Partition partitionObjAfter = new Partition(null, DB, TABLE, 0, 0, buildStorageDescriptor(newLocation), null);
-
-    SentryJSONAlterPartitionMessage message =
-            new SentryJSONAlterPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ,
-                    partitionObjBefore, partitionObjAfter, 0L);
-
-    Mockito.when(deserializer.getAlterPartitionMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-
-    Set<String> expected = new HashSet<>(2);
-    expected.add(PATH);
-    expected.add(newPath);
-    assertEquals(expected, update.get(AUTH));
-  }
-
-  /**
-   * Test alter table event that changes the database name when there are no tables.
-   * @throws Exception
-   */
-  @Test
-  public void testAlterTableChangeDbNameNoTables() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    String newDbName = "Db2";
-
-    NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), "");
-    event.setDbName(newDbName);
-    event.setTableName(TABLE);
-
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONAlterTableMessage message =
-            new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L);
-
-    Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    assertEquals(Collections.singleton(PATH), update.get(newDbName.toLowerCase()));
-    assertFalse(update.containsKey(DB.toLowerCase()));
-  }
-
-  @Test
-  /**
-   * Test alter table event that changes the database name when there are tables.
-   * All entries like "dbName.tableName" should have dbName changed to the new name.
-   * @throws Exception
-   */
-  public void testAlterTableChangeDbNameWithTables() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    Set<String> locations = new HashSet<>(1);
-    locations.add(PATH);
-    update.put(AUTH, locations);
-
-    String newDbName = "Db2";
-    String newAuth = newDbName.toLowerCase() + "." + TABLE.toLowerCase();
-
-    NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), "");
-    event.setDbName(newDbName);
-    event.setTableName(TABLE);
-
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONAlterTableMessage message =
-            new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L);
-
-    Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>(2);
-    expected.put(newDbName.toLowerCase(), Collections.singleton(PATH));
-    expected.put(newAuth, Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test alter table event that changes table name.
-   * @throws Exception
-   */
-  @Test
-  public void testAlterTableChangeTableName() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    Set<String> locations = new HashSet<>(1);
-    locations.add(PATH);
-    update.put(AUTH, locations);
-
-    String newTableName = "Table2";
-    String newAuth = DB.toLowerCase() + "." + newTableName.toLowerCase();
-
-    NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), "");
-    event.setDbName(DB);
-    event.setTableName(newTableName);
-
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    SentryJSONAlterTableMessage message =
-            new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L);
-
-    Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>(2);
-    expected.put(DB.toLowerCase(), Collections.singleton(PATH));
-    expected.put(newAuth, Collections.singleton(PATH));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test alter table event that changes object location.
-   * @throws Exception
-   */
-  @Test
-  public void testAlterTableChangeLocation() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(DB.toLowerCase(), Collections.singleton(PATH));
-    Set<String> locations = new HashSet<>(1);
-    locations.add(PATH);
-    update.put(AUTH, locations);
-
-    NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), "");
-    event.setDbName(DB);
-    event.setTableName(TABLE);
-
-    String newPath = "hello/world";
-    String newLocation = uri(newPath);
-
-    MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class);
-
-    Table tableWithNewLocation = buildTable(DB, TABLE, newLocation);
-    SentryJSONAlterTableMessage message =
-            new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, tableWithNewLocation, 0L);
-
-    Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message);
-    FullUpdateModifier.applyEvent(update, event, deserializer);
-    Map<String, Set<String>> expected = new HashMap<>(2);
-    expected.put(DB.toLowerCase(), Collections.singleton(PATH));
-    expected.put(AUTH.toLowerCase(), Collections.singleton(newPath));
-    assertEquals(expected, update);
-  }
-
-  /**
-   * Test renamePrefixKeys function.
-   * We ask to rename the "foo.bar" key to the "baz.bar" key.
-   * @throws Exception
-   */
-  @Test
-  public void testRenamePrefixKeys() throws Exception {
-    String oldKey = "foo.";
-    String newKey = "baz.";
-    String postfix = "bar";
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put(oldKey + postfix , Collections.<String>emptySet());
-    FullUpdateModifier.renamePrefixKeys(update, oldKey, newKey);
-    assertEquals(1, update.size());
-    assertTrue(update.containsKey(newKey + postfix));
-  }
-
-  /**
-   * Test renamePostfixKeys and renamePrefixKeys functions when the destination keys exist.
-   * Should not change anything.
-   * We ask to rename "foo.bar" key to "baz.bar" key.
-   * @throws Exception
-   */
-  @Test
-  public void testRenameKeysWithConflicts() throws Exception {
-    Map<String, Collection<String>> update = new HashMap<>();
-    update.put("foo.bar", Collections.<String>emptySet());
-    update.put("baz.bar", Collections.<String>emptySet());
-    Map<String, Collection<String>> expected = new HashMap<>(update);
-
-    FullUpdateModifier.renamePrefixKeys(update, "foo.", "baz.");
-    assertEquals(update, expected);
-  }
-}
\ No newline at end of file
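
The renamePrefixKeys tests above only pin down the observable behaviour: entries whose keys
start with the old prefix are re-keyed to the new prefix, and nothing changes if a destination
key already exists. Below is a hedged sketch of that behaviour; it is not the FullUpdateModifier
implementation, just the semantics the two tests assert.

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class RenamePrefixSketch {
  // Re-key "oldPrefix*" entries to "newPrefix*"; leave the map untouched on any conflict.
  static void renamePrefixKeys(Map<String, Collection<String>> update,
                               String oldPrefix, String newPrefix) {
    Map<String, Collection<String>> renamed = new HashMap<>();
    for (Map.Entry<String, Collection<String>> e : update.entrySet()) {
      if (e.getKey().startsWith(oldPrefix)) {
        String newKey = newPrefix + e.getKey().substring(oldPrefix.length());
        if (update.containsKey(newKey)) {
          return;                                   // conflict: change nothing
        }
        renamed.put(newKey, e.getValue());
      }
    }
    for (String key : new ArrayList<>(update.keySet())) {
      if (key.startsWith(oldPrefix)) {
        update.remove(key);
      }
    }
    update.putAll(renamed);
  }

  public static void main(String[] args) {
    Map<String, Collection<String>> update = new HashMap<>();
    update.put("foo.bar", Collections.<String>emptySet());
    renamePrefixKeys(update, "foo.", "baz.");
    System.out.println(update.keySet());            // prints [baz.bar]
  }
}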

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java b/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java
deleted file mode 100644
index 83a1bec..0000000
--- a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
-  Licensed to the Apache Software Foundation (ASF) under one
-  or more contributor license agreements.  See the NOTICE file
-  distributed with this work for additional information
-  regarding copyright ownership.  The ASF licenses this file
-  to you under the Apache License, Version 2.0 (the
-  "License"); you may not use this file except in compliance
-  with the License.  You may obtain a copy of the License at
-  <p>
-  http://www.apache.org/licenses/LICENSE-2.0
-  <p>
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
- */
-
-package org.apache.sentry.service.thrift;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient.NotificationFilter;
-import org.apache.hadoop.hive.metastore.api.NotificationEvent;
-import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
-import org.apache.sentry.hdfs.UniquePathsUpdate;
-import org.apache.sentry.provider.db.service.persistent.SentryStore;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-
-public class TestHiveNotificationFetcher {
-  @Test
-  public void testGetEmptyNotificationsWhenHmsReturnsANullResponse() throws Exception {
-    SentryStore store = Mockito.mock(SentryStore.class);
-    HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class);
-    HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class);
-
-    Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient));
-
-    try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) {
-      List<NotificationEvent> events;
-
-      Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null))
-          .thenReturn(null);
-
-      events = fetcher.fetchNotifications(0);
-      assertTrue(events.isEmpty());
-    }
-  }
-
-  @Test
-  public void testGetEmptyNotificationsWhenHmsReturnsEmptyEvents() throws Exception {
-    SentryStore store = Mockito.mock(SentryStore.class);
-    HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class);
-    HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class);
-
-    Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient));
-
-    try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) {
-      List<NotificationEvent> events;
-
-      Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null))
-          .thenReturn(new NotificationEventResponse(Collections.<NotificationEvent>emptyList()));
-
-      events = fetcher.fetchNotifications(0);
-      assertTrue(events.isEmpty());
-    }
-  }
-
-  @Test
-  public void testGetAllNotificationsReturnedByHms() throws Exception {
-    SentryStore store = Mockito.mock(SentryStore.class);
-    HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class);
-    HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class);
-
-    Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient));
-
-    try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) {
-      List<NotificationEvent> events;
-
-      Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null))
-          .thenReturn(new NotificationEventResponse(
-              Arrays.<NotificationEvent>asList(
-                  new NotificationEvent(1L, 0, "CREATE_DATABASE", ""),
-                  new NotificationEvent(2L, 0, "CREATE_TABLE", "")
-              )
-          ));
-
-      events = fetcher.fetchNotifications(0);
-      assertEquals(2, events.size());
-      assertEquals(1, events.get(0).getEventId());
-      assertEquals("CREATE_DATABASE", events.get(0).getEventType());
-      assertEquals(2, events.get(1).getEventId());
-      assertEquals("CREATE_TABLE", events.get(1).getEventType());
-    }
-  }
-
-  @Test
-  public void testGetDuplicatedEventsAndFilterEventsAlreadySeen() throws Exception {
-    final SentryStore store = Mockito.mock(SentryStore.class);
-    HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class);
-    HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class);
-
-    Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient));
-
-    try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) {
-      List<NotificationEvent> events;
-
-      /*
-       * Requesting an ID > 0 will request all notifications from 0 again, but filter out
-       * the already-seen notifications with ID = 1.
-       */
-
-      // This mock will also test that the NotificationFilter works as expected
-      Mockito.when(hmsClient.getNextNotification(Mockito.eq(0L), Mockito.eq(Integer.MAX_VALUE),
-          (NotificationFilter) Mockito.notNull())).thenAnswer(new Answer<NotificationEventResponse>() {
-            @Override
-            public NotificationEventResponse answer(InvocationOnMock invocation)
-                throws Throwable {
-              NotificationFilter filter = (NotificationFilter) invocation.getArguments()[2];
-              NotificationEventResponse response = new NotificationEventResponse();
-
-              List<NotificationEvent> events = Arrays.<NotificationEvent>asList(
-                  new NotificationEvent(1L, 0, "CREATE_DATABASE", ""),
-                  new NotificationEvent(1L, 0, "CREATE_TABLE", ""),
-                  new NotificationEvent(2L, 0, "ALTER_TABLE", "")
-              );
-
-              for (NotificationEvent event : events) {
-                String hash = UniquePathsUpdate.sha1(event);
-                
-                // We simulate that CREATE_DATABASE is already processed
-                if (event.getEventType().equals("CREATE_DATABASE")) {
-                  Mockito.when(store.isNotificationProcessed(Mockito.eq(hash))).thenReturn(true);
-                } else {
-                  Mockito.when(store.isNotificationProcessed(Mockito.eq(hash))).thenReturn(false);
-                }
-
-                if (filter.accept(event)) {
-                  response.addToEvents(event);
-                }
-              }
-
-              return response;
-            }
-          });
-
-      events = fetcher.fetchNotifications(1);
-      assertEquals(2, events.size());
-      assertEquals(1, events.get(0).getEventId());
-      assertEquals("CREATE_TABLE", events.get(0).getEventType());
-      assertEquals(2, events.get(1).getEventId());
-      assertEquals("ALTER_TABLE", events.get(1).getEventType());
-    }
-  }
-}
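
The duplicate-filtering test above relies on hashing each HMS notification and asking
SentryStore whether that hash was already processed (UniquePathsUpdate.sha1 in the real code).
The fragment below is only a rough sketch of that idea: the hash function is a hypothetical
stand-in and an in-memory set replaces SentryStore.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class NotificationDedupSketch {
  // Hypothetical stand-in for UniquePathsUpdate.sha1(event).
  static String hash(long eventId, String eventType) {
    return eventId + ":" + eventType;
  }

  public static void main(String[] args) {
    Set<String> processed = new HashSet<>();        // stands in for SentryStore state
    processed.add(hash(1L, "CREATE_DATABASE"));     // already-seen notification

    // Re-fetched events, including a duplicated event ID with a different type.
    String[][] events = {{"1", "CREATE_DATABASE"}, {"1", "CREATE_TABLE"}, {"2", "ALTER_TABLE"}};
    List<String> accepted = new ArrayList<>();
    for (String[] e : events) {
      String h = hash(Long.parseLong(e[0]), e[1]);
      if (processed.add(h)) {                       // accept only unseen hashes
        accepted.add(e[1]);
      }
    }
    System.out.println(accepted);                   // prints [CREATE_TABLE, ALTER_TABLE]
  }
}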

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java b/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java
deleted file mode 100644
index 38668ca..0000000
--- a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java
+++ /dev/null
@@ -1,344 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.service.thrift;
-
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
-import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
-import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.hive.metastore.api.Partition;
-import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
-import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.sentry.provider.db.service.persistent.PathsImage;
-import org.apache.thrift.TException;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.mockito.Mockito;
-import org.mockito.invocation.InvocationOnMock;
-import org.mockito.stubbing.Answer;
-
-import javax.security.auth.login.LoginException;
-
-/**
- * Tests SentryHMSClient against a mocked HiveMetaStoreClient.
- */
-public class TestSentryHMSClient {
-
-  private static final Configuration conf = new Configuration();
-  private static SentryHMSClient client;
-  private static MockHMSClientFactory hiveConnectionFactory;
-
-  /**
-   * Create mock database with the given name
-   *
-   * @param name Database name
-   * @return Mock database object
-   */
-  private static Database makeDb(String name) {
-    Database db = Mockito.mock(Database.class);
-    Mockito.when(db.getName()).thenReturn(name);
-    Mockito.when(db.getLocationUri()).thenReturn("hdfs:///" + name);
-    return db;
-  }
-
-  /**
-   * Create mock table
-   *
-   * @param dbName db for this table
-   * @param tableName name of the table
-   * @return mock table object
-   */
-  private static Table makeTable(String dbName, String tableName) {
-    Table table = Mockito.mock(Table.class);
-    Mockito.when(table.getDbName()).thenReturn(dbName);
-    Mockito.when(table.getTableName()).thenReturn(tableName);
-    StorageDescriptor sd = Mockito.mock(StorageDescriptor.class);
-    Mockito.when(sd.getLocation()).thenReturn(
-        String.format("hdfs:///%s/%s", dbName, tableName));
-    Mockito.when(table.getSd()).thenReturn(sd);
-    return table;
-  }
-
-  /**
-   * Create mock partition
-   *
-   * @param dbName database for this partition
-   * @param tableName table for this partition
-   * @param partName partition name
-   * @return mock partition object
-   */
-  private static Partition makePartition(String dbName, String tableName, String partName) {
-    Partition partition = Mockito.mock(Partition.class);
-    StorageDescriptor sd = Mockito.mock(StorageDescriptor.class);
-    Mockito.when(sd.getLocation()).thenReturn(
-        String.format("hdfs:///%s/%s/%s", dbName, tableName, partName));
-    Mockito.when(partition.getSd()).thenReturn(sd);
-    return partition;
-  }
-
-  @BeforeClass
-  static public void initialize() throws IOException, LoginException {
-    hiveConnectionFactory = new MockHMSClientFactory();
-    client = new SentryHMSClient(conf, (HiveConnectionFactory)hiveConnectionFactory);
-  }
-
-  /**
-   * Creating a snapshot when SentryHMSClient is not connected to HMS.
-   */
-  @Test
-  public void testSnapshotCreationWithOutClientConnected() throws Exception {
-    // Make sure that client is not connected
-    Assert.assertFalse(client.isConnected());
-    PathsImage snapshotInfo = client.getFullSnapshot();
-    Assert.assertTrue(snapshotInfo.getPathImage().isEmpty());
-  }
-
-  /**
-   * Creating a snapshot when HMS doesn't have any data.
-   */
-  @Test
-  public void testSnapshotCreationWithNoHmsData() throws Exception {
-    MockClient mockClient = new MockClient(new HiveSnapshot(), 1);
-    client.setClient(mockClient.client);
-    // Make sure that client is connected
-    Assert.assertTrue(client.isConnected());
-    PathsImage snapshotInfo = client.getFullSnapshot();
-    Assert.assertTrue(snapshotInfo.getPathImage().isEmpty());
-  }
-
-  /**
-   * Creating a snapshot when there is data but HMS data is being updated in the meantime.
-   */
-  @Test
-  public void testSnapshotCreationWhenDataIsActivelyUpdated() throws Exception {
-    HiveTable tab21 = new HiveTable("tab21");
-    HiveTable tab31 = new HiveTable("tab31").add("part311").add("part312");
-    HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31));
-    HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21));
-    HiveDb db1 = new HiveDb("db1");
-    HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3);
-    final MockClient mockClient = new MockClient(snap, 1);
-
-    client.setClient(mockClient.client);
-    hiveConnectionFactory.setClient(mockClient);
-    // Make sure that client is connected
-    Assert.assertTrue(client.isConnected());
-    PathsImage snapshotInfo = client.getFullSnapshot();
-    // Make sure that snapshot is not empty
-    Assert.assertTrue(!snapshotInfo.getPathImage().isEmpty());
-
-    Mockito.when(mockClient.client.getCurrentNotificationEventId()).
-        thenAnswer(new Answer<CurrentNotificationEventId>() {
-          @Override
-          public CurrentNotificationEventId answer(InvocationOnMock invocation)
-              throws Throwable {
-            return new CurrentNotificationEventId(mockClient.incrementNotificationEventId());
-          }
-
-        });
-
-    snapshotInfo = client.getFullSnapshot();
-    Assert.assertTrue(snapshotInfo.getPathImage().isEmpty());
-  }
-
-  /**
-   * Creating a snapshot when there is data in HMS.
-   */
-  @Test
-  public void testSnapshotCreationSuccess() throws Exception {
-    HiveTable tab21 = new HiveTable("tab21");
-    HiveTable tab31 = new HiveTable("tab31");
-    HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31));
-    HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21));
-    HiveDb db1 = new HiveDb("db1");
-    HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3);
-    MockClient mockClient = new MockClient(snap, 1);
-    Mockito.when(mockClient.client.getCurrentNotificationEventId()).
-        thenReturn(new CurrentNotificationEventId(mockClient.eventId));
-    client.setClient(mockClient.client);
-    hiveConnectionFactory.setClient(mockClient);
-    // Make sure that client is connected
-    Assert.assertTrue(client.isConnected());
-
-    PathsImage snapshotInfo = client.getFullSnapshot();
-    Assert.assertEquals(5, snapshotInfo.getPathImage().size());
-    Assert.assertEquals(Sets.newHashSet("db1"), snapshotInfo.getPathImage().get("db1"));
-    Assert.assertEquals(Sets.newHashSet("db2"), snapshotInfo.getPathImage().get("db2"));
-    Assert.assertEquals(Sets.newHashSet("db3"), snapshotInfo.getPathImage().get("db3"));
-    Assert.assertEquals(Sets.newHashSet("db2/tab21"),
-        snapshotInfo.getPathImage().get("db2.tab21"));
-    Assert.assertEquals(Sets.newHashSet("db3/tab31"), snapshotInfo.getPathImage().get("db3.tab31"));
-    Assert.assertEquals(snapshotInfo.getId(), mockClient.eventId);
-
-  }
-
-  /**
-   * Representation of a Hive table. A table has a name and a list of partitions.
-   */
-  private static class HiveTable {
-
-    private final String name;
-    private final List<String> partitions;
-
-    HiveTable(String name) {
-      this.name = name;
-      this.partitions = new ArrayList<>();
-    }
-
-    HiveTable add(String partition) {
-      partitions.add(partition);
-      return this;
-    }
-  }
-
-  /**
-   * Representation of a Hive database. A database has a name and a list of tables
-   */
-  private static class HiveDb {
-
-    final String name;
-    Collection<HiveTable> tables;
-
-    @SuppressWarnings("SameParameterValue")
-    HiveDb(String name) {
-      this.name = name;
-      tables = new ArrayList<>();
-    }
-
-    HiveDb(String name, Collection<HiveTable> tables) {
-      this.name = name;
-      this.tables = tables;
-      if (this.tables == null) {
-        this.tables = new ArrayList<>();
-      }
-    }
-
-    void add(HiveTable table) {
-      this.tables.add(table);
-    }
-  }
-
-  /**
-   * Representation of a full Hive snapshot. A snapshot is a collection of databases.
-   */
-  private static class HiveSnapshot {
-
-    final List<HiveDb> databases = new ArrayList<>();
-
-    HiveSnapshot() {
-    }
-
-    HiveSnapshot(Collection<HiveDb> dblist) {
-      if (dblist != null) {
-        databases.addAll(dblist);
-      }
-    }
-
-    HiveSnapshot add(HiveDb db) {
-      this.databases.add(db);
-      return this;
-    }
-  }
-
-  /**
-   * Mock implementation of HiveConnectionFactory.
-   */
-  private static class MockHMSClientFactory implements HiveConnectionFactory {
-
-    private HiveMetaStoreClient mClient;
-
-    public MockHMSClientFactory() {
-      mClient  = null;
-    }
-
-    void setClient(MockClient mockClient) {
-      this.mClient = mockClient.client;
-    }
-    @Override
-    public HMSClient connect() throws IOException, InterruptedException, MetaException {
-      return new HMSClient(mClient);
-    }
-
-    @Override
-    public void close() throws Exception {
-    }
-  }
-
-  /**
-   * Convert Hive snapshot to mock client that will return proper values
-   * for the snapshot.
-   */
-  private static class MockClient {
-
-    public HiveMetaStoreClient client;
-    public long eventId;
-
-    MockClient(HiveSnapshot snapshot, long eventId) throws TException {
-      this.eventId = eventId;
-      client = Mockito.mock(HiveMetaStoreClient.class);
-      List<String> dbNames = new ArrayList<>(snapshot.databases.size());
-      // Walk over all databases and mock appropriate objects
-      for (HiveDb mdb : snapshot.databases) {
-        String dbName = mdb.name;
-        dbNames.add(dbName);
-        Database db = makeDb(dbName);
-        Mockito.when(client.getDatabase(dbName)).thenReturn(db);
-        List<String> tableNames = new ArrayList<>(mdb.tables.size());
-        // Walk over all tables for the database and mock appropriate objects
-        for (HiveTable table : mdb.tables) {
-          String tableName = table.name;
-          tableNames.add(tableName);
-          Table mockTable = makeTable(dbName, tableName);
-          Mockito.when(client.getTableObjectsByName(dbName,
-              Lists.newArrayList(tableName)))
-              .thenReturn(Lists.newArrayList(mockTable));
-          Mockito.when(client.listPartitionNames(dbName, tableName, (short) -1))
-              .thenReturn(table.partitions);
-          // Walk across all partitions and mock appropriate objects
-          for (String partName : table.partitions) {
-            Partition p = makePartition(dbName, tableName, partName);
-            Mockito.when(client.getPartitionsByNames(dbName, tableName,
-                Lists.<String>newArrayList(partName)))
-                .thenReturn(Lists.<Partition>newArrayList(p));
-          }
-        }
-        Mockito.when(client.getAllTables(dbName)).thenReturn(tableNames);
-      }
-      // Return all database names
-      Mockito.when(client.getAllDatabases()).thenReturn(dbNames);
-      Mockito.when(client.getCurrentNotificationEventId()).
-          thenReturn(new CurrentNotificationEventId(eventId));
-
-    }
-
-    public Long incrementNotificationEventId() {
-      eventId = eventId + 1;
-      return eventId;
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java
----------------------------------------------------------------------
diff --git a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java b/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java
deleted file mode 100644
index 4f71e1c..0000000
--- a/sentry-provider/sentry-provider-db/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
- * agreements.  See the NOTICE file distributed with this work for additional information regarding
- * copyright ownership.  The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with the License.  You may obtain
- * a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
- * or implied. See the License for the specific language governing permissions and limitations under
- * the License.
- */
-package org.apache.sentry.service.thrift;
-
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import org.junit.Before;
-import org.junit.Test;
-
-/**
- *
- */
-public class TestSentryStateBank {
-
-  @Before
-  public void setUp() {
-    SentryStateBank.clearAllStates();
-  }
-
-  @Test
-  public void testEnableState() {
-    SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE);
-    assertTrue("Expected FIRST_STATE to be enabled",
-        SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE));
-    assertFalse("Expected SECOND_STATE to be disabled",
-        SentryStateBank.isEnabled(TestState.COMPONENT, TestState.SECOND_STATE));
-  }
-
-  @Test
-  public void testStatesGetDisabled() {
-    SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE);
-    assertTrue("Expected FIRST_STATE to be enabled",
-        SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE));
-    SentryStateBank.disableState(TestState.COMPONENT, TestState.FIRST_STATE);
-    assertFalse("Expected FIRST_STATE to be disabled",
-        SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE));
-  }
-
-  @Test
-  public void testCheckMultipleStateCheckSuccess() {
-    SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE);
-    SentryStateBank.enableState(TestState.COMPONENT, TestState.SECOND_STATE);
-
-    assertTrue("Expected both FIRST_STATE and SECOND_STATE to be enabled",
-        SentryStateBank.hasStatesEnabled(TestState.COMPONENT, new HashSet<SentryState>(
-            Arrays.asList(TestState.FIRST_STATE, TestState.SECOND_STATE))));
-  }
-
-  @Test
-  public void testCheckMultipleStateCheckFailure() {
-    SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE);
-    assertFalse("Expected only FIRST_STATE to be enabled",
-        SentryStateBank.hasStatesEnabled(TestState.COMPONENT, new HashSet<SentryState>(
-            Arrays.asList(TestState.FIRST_STATE, TestState.SECOND_STATE))));
-  }
-
-
-  public enum TestState implements SentryState {
-    FIRST_STATE,
-    SECOND_STATE;
-
-    public static final String COMPONENT = "TestState";
-
-    @Override
-    public long getValue() {
-      return 1 << this.ordinal();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-service/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-service/pom.xml b/sentry-service/pom.xml
index 0388476..b63467b 100644
--- a/sentry-service/pom.xml
+++ b/sentry-service/pom.xml
@@ -31,6 +31,8 @@ limitations under the License.
 
     <modules>
         <module>sentry-service-api</module>
+        <module>sentry-service-server</module>
+        <module>sentry-service-client</module>
     </modules>
 
 </project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-service/sentry-service-client/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-client/pom.xml b/sentry-service/sentry-service-client/pom.xml
new file mode 100644
index 0000000..a1ae8c8
--- /dev/null
+++ b/sentry-service/sentry-service-client/pom.xml
@@ -0,0 +1,69 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-service</artifactId>
+        <version>2.1.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>sentry-service-client</artifactId>
+    <name>Sentry Service Client</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>commons-lang</groupId>
+            <artifactId>commons-lang</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.derby</groupId>
+            <artifactId>derby</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.thrift</groupId>
+            <artifactId>libfb303</artifactId>
+            <version>${libfb303.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.thrift</groupId>
+            <artifactId>libthrift</artifactId>
+            <version>${libthrift.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-service-api</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-service/sentry-service-client/src/main/java/org/apache/sentry/service/thrift/SentryServiceClientFactory.java
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-client/src/main/java/org/apache/sentry/service/thrift/SentryServiceClientFactory.java b/sentry-service/sentry-service-client/src/main/java/org/apache/sentry/service/thrift/SentryServiceClientFactory.java
new file mode 100644
index 0000000..d146a0d
--- /dev/null
+++ b/sentry-service/sentry-service-client/src/main/java/org/apache/sentry/service/thrift/SentryServiceClientFactory.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.service.thrift;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.core.common.transport.RetryClientInvocationHandler;
+import org.apache.sentry.core.common.transport.SentryPolicyClientTransportConfig;
+import org.apache.sentry.core.common.transport.SentryTransportFactory;
+import org.apache.sentry.core.common.transport.SentryTransportPool;
+import org.apache.sentry.api.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.api.service.thrift.SentryPolicyServiceClientDefaultImpl;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.concurrent.ThreadSafe;
+import java.lang.reflect.Proxy;
+import java.util.concurrent.atomic.AtomicReference;
+
+/**
+ * Client factory for Sentry policy service clients. The factory uses connection pooling.
+ */
+@ThreadSafe
+public final class SentryServiceClientFactory {
+  private static final Logger LOGGER = LoggerFactory.getLogger(SentryServiceClientFactory.class);
+
+  private static final SentryPolicyClientTransportConfig transportConfig =
+          new SentryPolicyClientTransportConfig();
+  private final Configuration conf;
+  private final SentryTransportPool transportPool;
+
+  /** Keep track of singleton instances */
+  private static final AtomicReference<SentryServiceClientFactory> clientFactory =
+          new AtomicReference<>();
+
+  /**
+   * Create a client instance. The supplied configuration is only used the first time and
+   * is ignored afterwards. Tests that want to supply different configurations
+   * should call {@link #factoryReset(SentryServiceClientFactory)} to force a new configuration
+   * read.
+   * @param conf Configuration
+   * @return client instance
+   * @throws Exception
+   */
+  public static SentryPolicyServiceClient create(Configuration conf) throws Exception {
+    SentryServiceClientFactory factory = clientFactory.get();
+    if (factory != null) {
+      return factory.create();
+    }
+    factory = new SentryServiceClientFactory(conf);
+    boolean ok = clientFactory.compareAndSet(null, factory);
+    if (ok) {
+      return factory.create();
+    }
+    // Lost the race to install the singleton; close the factory we just created and use the winner's
+    factory.close();
+    return clientFactory.get().create();
+  }
+
+  /**
+   * Create a new instance of the factory which will hand off connections from
+   * the pool.
+   * @param conf Configuration object
+   */
+  private SentryServiceClientFactory(Configuration conf) {
+    this.conf = conf;
+
+    transportPool = new SentryTransportPool(conf, transportConfig,
+            new SentryTransportFactory(conf, transportConfig));
+  }
+
+  private SentryPolicyServiceClient create() throws Exception {
+    return (SentryPolicyServiceClient) Proxy
+      .newProxyInstance(SentryPolicyServiceClientDefaultImpl.class.getClassLoader(),
+        SentryPolicyServiceClientDefaultImpl.class.getInterfaces(),
+        new RetryClientInvocationHandler(conf,
+          new SentryPolicyServiceClientDefaultImpl(conf, transportPool), transportConfig));
+  }
+
+  /**
+   * Reset existing factory and return the old one.
+   * Only used by tests.
+   */
+  public static SentryServiceClientFactory factoryReset(SentryServiceClientFactory factory) {
+    LOGGER.debug("factory reset");
+    return clientFactory.getAndSet(factory);
+  }
+
+  public void close() {
+    try {
+      transportPool.close();
+    } catch (Exception e) {
+      LOGGER.error("failed to close transport pool", e);
+    }
+  }
+}
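
A minimal usage sketch of the new client factory (illustrative only, not part of this commit): it assumes a Hadoop Configuration carrying the usual Sentry client transport settings and that SentryPolicyServiceClient exposes a close() method; any actual policy calls depend on that interface, which is outside this hunk.

    // Illustrative sketch, not part of this commit.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.sentry.api.service.thrift.SentryPolicyServiceClient;
    import org.apache.sentry.service.thrift.SentryServiceClientFactory;

    public class SentryClientUsageSketch {
      public static void main(String[] args) throws Exception {
        // The configuration is only consulted when the singleton factory is first built;
        // later create() calls reuse the cached factory and its transport pool.
        Configuration conf = new Configuration();
        SentryPolicyServiceClient client = SentryServiceClientFactory.create(conf);
        try {
          // Policy calls (grant/revoke/list) would go here; their signatures live on
          // SentryPolicyServiceClient and are not shown in this diff.
        } finally {
          client.close(); // assumed: close() returns the pooled transport
        }
      }
    }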

http://git-wip-us.apache.org/repos/asf/sentry/blob/7db84b2f/sentry-service/sentry-service-server/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-server/pom.xml b/sentry-service/sentry-service-server/pom.xml
new file mode 100644
index 0000000..a103c1e
--- /dev/null
+++ b/sentry-service/sentry-service-server/pom.xml
@@ -0,0 +1,321 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-service</artifactId>
+        <version>2.1.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>sentry-service-server</artifactId>
+    <name>Sentry Service Server</name>
+
+    <dependencies>
+        <dependency>
+            <groupId>commons-lang</groupId>
+            <artifactId>commons-lang</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.derby</groupId>
+            <artifactId>derby</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.thrift</groupId>
+            <artifactId>libfb303</artifactId>
+            <version>${libfb303.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.thrift</groupId>
+            <artifactId>libthrift</artifactId>
+            <version>${libthrift.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
+            <artifactId>curator-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-service-api</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-provider-file</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-service-client</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-core-common</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-core-model-db</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-core-model-kafka</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-core-model-solr</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-core-model-sqoop</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-hdfs-common</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sentry</groupId>
+            <artifactId>sentry-binding-hive-follower</artifactId>
+            <version>${project.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.hive</groupId>
+                    <artifactId>hive-exec</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-core</artifactId>
+            <version>${datanucleus-core.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-api-jdo</artifactId>
+            <version>${datanucleus-api-jdo.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-rdbms</artifactId>
+            <version>${datanucleus-rdbms.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>javax.jdo</artifactId>
+            <version>${datanucleus-jdo.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-metastore</artifactId>
+            <version>${hive.version}</version>
+            <exclusions>
+                <!-- This dependency needs to be excluded to avoid compilation errors in the Eclipse build.
+                     Without this change, the Eclipse build classpath contains this jar file ahead of
+                     datanucleus javax.jdo*.jar. This error cannot be reproduced with the Maven build.
+                     Cause of the compilation error: the PersistenceManager class provided by this version
+                     does not implement the java.lang.AutoCloseable interface, which breaks the use of
+                     PersistenceManager inside a try-with-resources statement in the Sentry TransactionManager class.
+                 -->
+                <exclusion>
+                    <groupId>javax.jdo</groupId>
+                    <artifactId>jdo-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.jdo</groupId>
+                    <artifactId>jdo2-api</artifactId>
+                </exclusion>
+                <exclusion>
+                    <groupId>javax.jdo</groupId>
+                    <artifactId>jdo2-api-legacy</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-core-asl</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.codehaus.jackson</groupId>
+            <artifactId>jackson-mapper-asl</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.dropwizard.metrics</groupId>
+            <artifactId>metrics-core</artifactId>
+            <version>${metrics.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.dropwizard.metrics</groupId>
+            <artifactId>metrics-servlets</artifactId>
+            <version>${metrics.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>io.dropwizard.metrics</groupId>
+            <artifactId>metrics-jvm</artifactId>
+            <version>${metrics.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-server</artifactId>
+            <version>${jetty.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.jetty</groupId>
+            <artifactId>jetty-servlet</artifactId>
+            <version>${jetty.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-all</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-minikdc</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <sourceDirectory>${basedir}/src/main/java</sourceDirectory>
+        <testSourceDirectory>${basedir}/src/test/java</testSourceDirectory>
+        <resources>
+            <resource>
+                <directory>../../sentry-service/sentry-service-server/src/main/java/org/apache/sentry/provider/db/service/model</directory>
+                <includes>
+                    <include>package.jdo</include>
+                </includes>
+            </resource>
+            <resource>
+                <directory>${basedir}/src/main</directory>
+                <includes>
+                    <include>webapp/*</include>
+                    <include>webapp/css/*</include>
+                </includes>
+            </resource>
+        </resources>
+        <plugins>
+            <plugin>
+                <groupId>com.google.code.maven-replacer-plugin</groupId>
+                <artifactId>replacer</artifactId>
+                <version>1.5.2</version>
+                <executions>
+                    <execution>
+                        <id>replaceTokens</id>
+                        <phase>clean</phase>
+                        <goals>
+                            <goal>replace</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <file>${basedir}/src/main/webapp/SentryService.html</file>
+                    <replacements>
+                        <replacement>
+                            <token>%PROJECT_VERSION%</token>
+                            <value>${version}</value>
+                        </replacement>
+                    </replacements>
+                </configuration>
+            </plugin>
+            <plugin>
+                <groupId>org.datanucleus</groupId>
+                <artifactId>datanucleus-maven-plugin</artifactId>
+                <configuration>
+                    <api>JDO</api>
+                    <metadataIncludes>**/*.jdo</metadataIncludes>
+                    <verbose>true</verbose>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>process-classes</phase>
+                        <goals>
+                            <goal>enhance</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>${maven.shade.plugin.version}</version>
+                <executions>
+                    <execution>
+                        <id>curator-shade</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <artifactSet>
+                                <includes>
+                                    <!-- This is needed to support projects running on different versions of curator -->
+                                    <include>org.apache.curator:curator-recipes</include>
+                                    <include>org.apache.curator:curator-x-discovery</include>
+                                    <include>org.apache.curator:curator-framework</include>
+                                    <include>org.apache.curator:curator-client</include>
+                                </includes>
+                            </artifactSet>
+                            <relocations>
+                                <!-- Adding prefix to the package to make it unique -->
+                                <relocation>
+                                    <pattern>org.apache.curator</pattern>
+                                    <shadedPattern>sentry.org.apache.curator</shadedPattern>
+                                </relocation>
+                            </relocations>
+                            <shadedArtifactAttached>false</shadedArtifactAttached>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <goals>
+                            <goal>test-jar</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <configuration>
+                    <reuseForks>false</reuseForks>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
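
The Curator relocation above means downstream projects can keep their own org.apache.curator version: the server jar only references the renamed packages internally. A small check of that assumption (illustrative only, not part of this commit), assuming the shaded sentry-service-server jar is on the classpath:

    // Illustrative sketch, not part of this commit.
    public class ShadedCuratorCheck {
      public static void main(String[] args) throws ClassNotFoundException {
        // The relocated package prefix comes from the <shadedPattern> in the shade plugin config above.
        Class<?> shaded = Class.forName("sentry.org.apache.curator.framework.CuratorFramework");
        System.out.println("Relocated Curator class found: " + shaded.getName());
      }
    }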