You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@sentry.apache.org by pr...@apache.org on 2014/09/15 23:08:03 UTC

[25/25] git commit: SENTRY-432. HDFS integration. Initial patch

SENTRY-432. HDFS integration. Initial patch


Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/0eb6645e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/0eb6645e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/0eb6645e

Branch: refs/heads/sentry-hdfs-sync
Commit: 0eb6645e8848681e1746da4d616cb4160c514861
Parents: 00a5eba
Author: Prasad Mujumdar <pr...@cloudera.com>
Authored: Mon Sep 15 14:02:07 2014 -0700
Committer: Prasad Mujumdar <pr...@cloudera.com>
Committed: Mon Sep 15 14:07:13 2014 -0700

----------------------------------------------------------------------
 pom.xml                                         |    26 +-
 sentry-binding/sentry-binding-hive/pom.xml      |     8 +
 .../SentryHiveAuthorizationTaskFactoryImpl.java |    16 +
 .../sentry/binding/hive/conf/HiveAuthzConf.java |     6 +-
 .../SentryMetastorePostEventListener.java       |   105 +-
 sentry-dist/pom.xml                             |    12 +
 sentry-hdfs-int/pom.xml                         |    70 +
 .../hdfs/SentryAuthorizationConstants.java      |    55 +
 .../sentry/hdfs/SentryAuthorizationInfo.java    |   233 +
 .../hdfs/SentryAuthorizationProvider.java       |   370 +
 .../apache/sentry/hdfs/SentryPermissions.java   |   159 +
 .../apache/sentry/hdfs/SentryServiceClient.java |   200 +
 .../org/apache/sentry/hdfs/SentryUpdater.java   |    60 +
 .../sentry/hdfs/UpdateableAuthzPermissions.java |   179 +
 .../hdfs/MockSentryAuthorizationProvider.java   |    26 +
 .../sentry/hdfs/SentryAuthorizationInfoX.java   |    85 +
 .../hdfs/TestSentryAuthorizationProvider.java   |   163 +
 .../src/test/resources/hdfs-sentry.xml          |    33 +
 sentry-hdfs/pom.xml                             |    66 +
 .../java/org/apache/sentry/hdfs/AuthzPaths.java |    30 +
 .../apache/sentry/hdfs/AuthzPathsDumper.java    |    28 +
 .../apache/sentry/hdfs/AuthzPermissions.java    |    28 +
 .../sentry/hdfs/ExtendedMetastoreClient.java    |   104 +
 .../java/org/apache/sentry/hdfs/HMSPaths.java   |   467 +
 .../org/apache/sentry/hdfs/HMSPathsSerDe.java   |   113 +
 .../org/apache/sentry/hdfs/MetastoreClient.java |    34 +
 .../org/apache/sentry/hdfs/PathsUpdate.java     |    84 +
 .../apache/sentry/hdfs/PermissionsUpdate.java   |    93 +
 .../java/org/apache/sentry/hdfs/Updateable.java |    61 +
 .../sentry/hdfs/UpdateableAuthzPaths.java       |   130 +
 .../org/apache/sentry/hdfs/DummyAdapter.java    |    39 +
 .../apache/sentry/hdfs/DummyAuthzSource.java    |    60 +
 .../org/apache/sentry/hdfs/DummyHMSClient.java  |    79 +
 .../sentry/hdfs/TestAuthzPathCacheOld.java      |   523 +
 .../apache/sentry/hdfs/TestAuthzPermCache.java  |    64 +
 .../org/apache/sentry/hdfs/TestHMSPaths.java    |   357 +
 .../sentry/hdfs/TestHMSPathsFullDump.java       |    97 +
 .../sentry/hdfs/TestUpdateableAuthzPaths.java   |   136 +
 sentry-hdfs/src/test/resources/hdfs-sentry.xml  |    22 +
 sentry-provider/sentry-provider-db/pom.xml      |    91 +-
 .../db/service/thrift/SentryPolicyService.java  |  8966 -------------
 .../TAlterSentryRoleAddGroupsRequest.java       |   742 --
 .../TAlterSentryRoleAddGroupsResponse.java      |   390 -
 .../TAlterSentryRoleDeleteGroupsRequest.java    |   742 --
 .../TAlterSentryRoleDeleteGroupsResponse.java   |   390 -
 .../TAlterSentryRoleGrantPrivilegeRequest.java  |   693 -
 .../TAlterSentryRoleGrantPrivilegeResponse.java |   390 -
 .../TAlterSentryRoleRevokePrivilegeRequest.java |   693 -
 ...TAlterSentryRoleRevokePrivilegeResponse.java |   390 -
 .../thrift/TCreateSentryRoleRequest.java        |   587 -
 .../thrift/TCreateSentryRoleResponse.java       |   390 -
 .../service/thrift/TDropPrivilegesRequest.java  |   592 -
 .../service/thrift/TDropPrivilegesResponse.java |   390 -
 .../service/thrift/TDropSentryRoleRequest.java  |   587 -
 .../service/thrift/TDropSentryRoleResponse.java |   390 -
 ...TListSentryPrivilegesForProviderRequest.java |   759 --
 ...ListSentryPrivilegesForProviderResponse.java |   543 -
 .../thrift/TListSentryPrivilegesRequest.java    |   702 -
 .../thrift/TListSentryPrivilegesResponse.java   |   554 -
 .../service/thrift/TListSentryRolesRequest.java |   596 -
 .../thrift/TListSentryRolesResponse.java        |   554 -
 .../thrift/TRenamePrivilegesRequest.java        |   698 -
 .../thrift/TRenamePrivilegesResponse.java       |   390 -
 .../db/service/thrift/TSentryActiveRoleSet.java |   536 -
 .../db/service/thrift/TSentryAuthorizable.java  |   707 -
 .../db/service/thrift/TSentryGrantOption.java   |    48 -
 .../db/service/thrift/TSentryGroup.java         |   385 -
 .../db/service/thrift/TSentryPrivilege.java     |  1251 --
 .../provider/db/service/thrift/TSentryRole.java |   641 -
 .../service/thrift/TSentryResponseStatus.java   |   594 -
 .../thrift/sentry_common_serviceConstants.java  |    50 -
 .../provider/db/service/UpdateForwarder.java    |   227 +
 .../db/service/UpdateablePermissions.java       |    62 +
 .../db/service/persistent/SentryStore.java      |    71 +-
 .../thrift/SentryPolicyServiceClient.java       |    14 +-
 .../thrift/SentryPolicyStoreProcessor.java      |   190 +
 .../sentry/service/thrift/SentryService.java    |     3 +-
 .../sentry/service/thrift/ServiceConstants.java |   150 -
 .../main/resources/sentry_common_service.thrift |    42 -
 .../main/resources/sentry_policy_service.thrift |   223 -
 .../db/service/TestUpdateForwarder.java         |   277 +
 .../thrift/TestSentryPolicyStoreProcessor.java  |     2 +
 .../thrift/TestSentryServerWithoutKerberos.java |    60 +-
 sentry-service-client/pom.xml                   |   164 +
 .../db/service/thrift/SentryPolicyService.java  | 11554 +++++++++++++++++
 .../TAlterSentryRoleAddGroupsRequest.java       |   742 ++
 .../TAlterSentryRoleAddGroupsResponse.java      |   390 +
 .../TAlterSentryRoleDeleteGroupsRequest.java    |   742 ++
 .../TAlterSentryRoleDeleteGroupsResponse.java   |   390 +
 .../TAlterSentryRoleGrantPrivilegeRequest.java  |   693 +
 .../TAlterSentryRoleGrantPrivilegeResponse.java |   390 +
 .../TAlterSentryRoleRevokePrivilegeRequest.java |   693 +
 ...TAlterSentryRoleRevokePrivilegeResponse.java |   390 +
 .../db/service/thrift/TAuthzUpdateResponse.java |   603 +
 .../thrift/TCreateSentryRoleRequest.java        |   587 +
 .../thrift/TCreateSentryRoleResponse.java       |   390 +
 .../service/thrift/TDropPrivilegesRequest.java  |   592 +
 .../service/thrift/TDropPrivilegesResponse.java |   390 +
 .../service/thrift/TDropSentryRoleRequest.java  |   587 +
 .../service/thrift/TDropSentryRoleResponse.java |   390 +
 ...TListSentryPrivilegesForProviderRequest.java |   759 ++
 ...ListSentryPrivilegesForProviderResponse.java |   543 +
 .../thrift/TListSentryPrivilegesRequest.java    |   702 +
 .../thrift/TListSentryPrivilegesResponse.java   |   554 +
 .../service/thrift/TListSentryRolesRequest.java |   596 +
 .../thrift/TListSentryRolesResponse.java        |   554 +
 .../db/service/thrift/TPathChanges.java         |   765 ++
 .../provider/db/service/thrift/TPathEntry.java  |   747 ++
 .../provider/db/service/thrift/TPathsDump.java  |   549 +
 .../db/service/thrift/TPathsUpdate.java         |   748 ++
 .../db/service/thrift/TPermissionsUpdate.java   |   810 ++
 .../db/service/thrift/TPrivilegeChanges.java    |   713 +
 .../thrift/TRenamePrivilegesRequest.java        |   698 +
 .../thrift/TRenamePrivilegesResponse.java       |   390 +
 .../db/service/thrift/TRoleChanges.java         |   691 +
 .../db/service/thrift/TSentryActiveRoleSet.java |   536 +
 .../db/service/thrift/TSentryAuthorizable.java  |   707 +
 .../db/service/thrift/TSentryGrantOption.java   |    48 +
 .../db/service/thrift/TSentryGroup.java         |   385 +
 .../db/service/thrift/TSentryPrivilege.java     |  1251 ++
 .../provider/db/service/thrift/TSentryRole.java |   641 +
 .../service/thrift/TSentryResponseStatus.java   |   594 +
 .../thrift/sentry_common_serviceConstants.java  |    50 +
 .../sentry/service/thrift/ServiceConstants.java |   153 +
 .../main/resources/sentry_common_service.thrift |    42 +
 .../main/resources/sentry_policy_service.thrift |   266 +
 sentry-tests/sentry-tests-hive/pom.xml          |     5 +
 ...actMetastoreTestWithStaticConfiguration.java |     3 +
 .../e2e/metastore/TestMetastoreEndToEnd.java    |    49 +-
 129 files changed, 39566 insertions(+), 25843 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index e172e92..e66e790 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,7 +68,7 @@ limitations under the License.
     <derby.version>10.10.2.0</derby.version>
     <commons-cli.version>1.2</commons-cli.version>
     <hive.version>0.13.1-cdh5.2.0-SNAPSHOT</hive.version>
-    <hadoop.version>2.3.0-cdh5.1.0-SNAPSHOT</hadoop.version>
+    <hadoop.version>2.5.0</hadoop.version>
     <fest.reflect.version>1.4.1</fest.reflect.version>
     <guava.version>11.0.2</guava.version>
     <junit.version>4.9</junit.version>
@@ -149,6 +149,12 @@ limitations under the License.
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-hdfs</artifactId>
+        <version>${hadoop.version}</version>
+        <type>test-jar</type>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-minicluster</artifactId>
         <version>${hadoop.version}</version>
       </dependency>
@@ -324,6 +330,11 @@ limitations under the License.
       </dependency>
       <dependency>
         <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-service-client</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
         <artifactId>sentry-provider-common</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -334,6 +345,16 @@ limitations under the License.
       </dependency>
       <dependency>
         <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-hdfs</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
+        <artifactId>sentry-hdfs-int</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.sentry</groupId>
         <artifactId>sentry-provider-cache</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -402,6 +423,9 @@ limitations under the License.
     <module>sentry-policy</module>
     <module>sentry-tests</module>
     <module>sentry-dist</module>
+    <module>sentry-service-client</module>
+    <module>sentry-hdfs</module>
+    <module>sentry-hdfs-int</module>
   </modules>
 
   <build>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-binding/sentry-binding-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/pom.xml b/sentry-binding/sentry-binding-hive/pom.xml
index e72b370..aa3a8c9 100644
--- a/sentry-binding/sentry-binding-hive/pom.xml
+++ b/sentry-binding/sentry-binding-hive/pom.xml
@@ -75,6 +75,14 @@ limitations under the License.
     <!-- required for SentryGrantRevokeTask -->
     <dependency>
       <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-db</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
index f38ee91..dfcf63a 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/SentryHiveAuthorizationTaskFactoryImpl.java
@@ -50,11 +50,14 @@ import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.sentry.core.model.db.AccessConstants;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 
 public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorizationTaskFactory {
 
+  private static final Logger LOG = LoggerFactory.getLogger(SentryHiveAuthorizationTaskFactoryImpl.class);
 
   public SentryHiveAuthorizationTaskFactoryImpl(HiveConf conf, Hive db) {
 
@@ -236,8 +239,11 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
 
   private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
       HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
+    LOG.debug("## FULL AST : [" + ast.dump() + "]");
+    LOG.debug("## CHILD AST : [" + ((ASTNode)ast.getChild(0)).dump() + "]");
     List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
         (ASTNode) ast.getChild(0));
+    
     List<String> roles = new ArrayList<String>();
     for (int i = 1; i < ast.getChildCount(); i++) {
       roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
@@ -314,18 +320,28 @@ public class SentryHiveAuthorizationTaskFactoryImpl implements HiveAuthorization
       ASTNode child = (ASTNode) node.getChild(i);
       PrincipalType type = null;
       switch (child.getType()) {
+      case 880:
+        type = PrincipalType.USER;
+        break;
       case HiveParser.TOK_USER:
         type = PrincipalType.USER;
         break;
+      case 685:
+        type = PrincipalType.GROUP;
+        break;
       case HiveParser.TOK_GROUP:
         type = PrincipalType.GROUP;
         break;
+      case 782:
+        type = PrincipalType.ROLE;
+        break;
       case HiveParser.TOK_ROLE:
         type = PrincipalType.ROLE;
         break;
       }
       String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
       PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
+      LOG.debug("## Principal : [ " + principalName + ", " + type + "]");
       principalList.add(principalDesc);
     }
     return principalList;

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
index 3a83895..65d3933 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/hive/conf/HiveAuthzConf.java
@@ -18,7 +18,7 @@ package org.apache.sentry.binding.hive.conf;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.mortbay.log.Log;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -201,8 +201,8 @@ public class HiveAuthzConf extends Configuration {
       if (retVal == null) {
         retVal = AuthzConfVars.getDefault(varName);
       } else {
-        Log.warn("Using the deprecated config setting " + currentToDeprecatedProps.get(varName).getVar() +
-            " instead of " + varName);
+//        Log.warn("Using the deprecated config setting " + currentToDeprecatedProps.get(varName).getVar() +
+//            " instead of " + varName);
       }
     }
     if (retVal == null) {

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
----------------------------------------------------------------------
diff --git a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
index 0500483..30b68ab 100644
--- a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
+++ b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
@@ -19,34 +19,48 @@ package org.apache.sentry.binding.metastore;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.DropTableEvent;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.sentry.SentryUserException;
-import org.apache.sentry.binding.hive.HiveAuthzBindingHook;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf;
 import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars;
 import org.apache.sentry.core.common.Authorizable;
 import org.apache.sentry.core.model.db.Database;
 import org.apache.sentry.core.model.db.Server;
 import org.apache.sentry.core.model.db.Table;
+import org.apache.sentry.hdfs.PathsUpdate;
 import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient;
+import org.apache.sentry.provider.db.service.thrift.TPathChanges;
 import org.apache.sentry.service.thrift.SentryServiceClientFactory;
+import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
+
+import com.google.common.collect.Lists;
 
 public class SentryMetastorePostEventListener extends MetaStoreEventListener {
   private final SentryServiceClientFactory sentryClientFactory;
   private final HiveAuthzConf authzConf;
   private final Server server;
 
+  // Initialized to some value > 1 so that the first update notification
+  // will trigger a full Image fetch
+  private final AtomicInteger seqNum = new AtomicInteger(5);
+
   public SentryMetastorePostEventListener(Configuration config) {
     super(config);
     sentryClientFactory = new SentryServiceClientFactory();
@@ -57,6 +71,14 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
 
   @Override
   public void onCreateTable (CreateTableEvent tableEvent) throws MetaException {
+    PathsUpdate update = createHMSUpdate();
+    if (tableEvent.getTable().getSd().getLocation() != null) {
+      update.newPathChange(
+          tableEvent.getTable().getDbName() + "."
+              + tableEvent.getTable().getTableName()).addToAddPaths(
+                  PathsUpdate.cleanPath(tableEvent.getTable().getSd().getLocation()));
+      notifySentry(update);
+    }
     // drop the privileges on the given table, in case if anything was left
     // behind during the drop
     if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
@@ -68,6 +90,12 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
 
   @Override
   public void onDropTable(DropTableEvent tableEvent) throws MetaException {
+    PathsUpdate update = createHMSUpdate();
+    update.newPathChange(
+        tableEvent.getTable().getDbName() + "."
+            + tableEvent.getTable().getTableName()).addToDelPaths(
+        Lists.newArrayList(PathsUpdate.ALL_PATHS));
+    notifySentry(update);
     // drop the privileges on the given table
     if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
       return;
@@ -79,6 +107,12 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
   @Override
   public void onCreateDatabase(CreateDatabaseEvent dbEvent)
       throws MetaException {
+    if (dbEvent.getDatabase().getLocationUri() != null) {
+      PathsUpdate update = createHMSUpdate();
+      update.newPathChange(dbEvent.getDatabase().getName()).addToAddPaths(
+          PathsUpdate.cleanPath(dbEvent.getDatabase().getLocationUri()));
+      notifySentry(update);
+    }
     // drop the privileges on the database, incase anything left behind during
     // last drop db
     if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_CREATE_WITH_POLICY_STORE)) {
@@ -94,10 +128,14 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
    */
   @Override
   public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
+    PathsUpdate update = createHMSUpdate();
+    update.newPathChange(dbEvent.getDatabase().getName()).addToDelPaths(
+        Lists.newArrayList(PathsUpdate.ALL_PATHS));
+    notifySentry(update);
+    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
     if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE)) {
       return;
     }
-    dropSentryDbPrivileges(dbEvent.getDatabase().getName());
   }
 
   /**
@@ -106,6 +144,7 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
   @Override
   public void onAlterTable (AlterTableEvent tableEvent) throws MetaException {
     String oldTableName = null, newTableName = null;
+    // TODO : notify SentryHMSPathCache
     if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_ALTER_WITH_POLICY_STORE)) {
       return;
     }
@@ -121,6 +160,51 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
     }
   }
 
+  
+
+  @Override
+  public void onAddPartition(AddPartitionEvent partitionEvent)
+      throws MetaException {
+    PathsUpdate update = createHMSUpdate();
+//    TPathChanges pathUpdate = update.newPathChange(
+//        partitionEvent.getTable().getDbName() + "."
+//            + partitionEvent.getTable().getTableName());
+    Map<String, TPathChanges> pcMap = new HashMap<String, TPathChanges>();
+    boolean anyPaths = false;
+    for (Partition part : partitionEvent.getPartitions()) {
+      if ((part.getSd() != null) && (part.getSd().getLocation() != null)) {
+        String authzObj = part.getDbName() + "." + part.getTableName();
+        TPathChanges pathUpdate = pcMap.get(authzObj);
+        if (pathUpdate == null) {
+          pathUpdate = update.newPathChange(authzObj);
+          pcMap.put(authzObj, pathUpdate);
+        }
+        pathUpdate.addToAddPaths(PathsUpdate
+            .cleanPath(part.getSd().getLocation()));
+        anyPaths = true;
+      }
+    }
+    if (anyPaths) {
+      notifySentry(update);
+    }
+    // TODO Auto-generated method stub
+    super.onAddPartition(partitionEvent);
+  }
+
+  @Override
+  public void onDropPartition(DropPartitionEvent partitionEvent)
+      throws MetaException {
+    PathsUpdate update = createHMSUpdate();
+    update.newPathChange(
+        partitionEvent.getTable().getDbName() + "."
+            + partitionEvent.getTable().getTableName()).addToDelPaths(
+        PathsUpdate.cleanPath(partitionEvent.getPartition().getSd()
+            .getLocation()));
+    notifySentry(update);
+    // TODO Auto-generated method stub
+    super.onDropPartition(partitionEvent);
+  }
+
   private SentryPolicyServiceClient getSentryServiceClient()
       throws MetaException {
     try {
@@ -201,8 +285,25 @@ public class SentryMetastorePostEventListener extends MetaStoreEventListener {
     }
   }
 
+  private void notifySentry(PathsUpdate update) throws MetaException {
+    if (!authzConf.getBoolean(ServerConfig.SENTRY_HDFS_INTEGRATION_ENABLE, true)) {
+      return;
+    }
+    try {
+      getSentryServiceClient().notifyHMSUpdate(update);
+    } catch (SentryUserException e) {
+      throw new MetaException("Error sending update to Sentry [" + e.getMessage() + "]");
+    }
+  }
+
   private boolean syncWithPolicyStore(AuthzConfVars syncConfVar) {
     return "true"
         .equalsIgnoreCase((authzConf.get(syncConfVar.getVar(), "true")));
   }
+
+  private PathsUpdate createHMSUpdate() {
+    PathsUpdate update = new PathsUpdate(seqNum.incrementAndGet(), false);
+    return update;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-dist/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-dist/pom.xml b/sentry-dist/pom.xml
index cd7126b..510fd97 100644
--- a/sentry-dist/pom.xml
+++ b/sentry-dist/pom.xml
@@ -64,6 +64,18 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs-int</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-policy-common</artifactId>
     </dependency>
     <dependency>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/pom.xml b/sentry-hdfs-int/pom.xml
new file mode 100644
index 0000000..7f86186
--- /dev/null
+++ b/sentry-hdfs-int/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.sentry</groupId>
+    <artifactId>sentry</artifactId>
+    <version>1.5.0-incubating-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <artifactId>sentry-hdfs-int</artifactId>
+  <name>Sentry HDFS Integration Plugin</name>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs</artifactId>
+      <version>1.5.0-incubating-SNAPSHOT</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-client</artifactId>
+      <version>1.5.0-incubating-SNAPSHOT</version>
+    </dependency>
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
new file mode 100644
index 0000000..9f219ce
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+public class SentryAuthorizationConstants {
+
+  public static final String CONFIG_FILE = "hdfs-sentry.xml";
+
+  public static final String CONFIG_PREFIX = "sentry.authorization-provider.";
+
+  public static final String HDFS_USER_KEY = CONFIG_PREFIX + "hdfs-user";
+  public static final String HDFS_USER_DEFAULT = "hive";
+
+  public static final String HDFS_GROUP_KEY = CONFIG_PREFIX + "hdfs-group";
+  public static final String HDFS_GROUP_DEFAULT = "hive";
+
+  public static final String HDFS_PERMISSION_KEY = CONFIG_PREFIX + 
+      "hdfs-permission";
+  public static final long HDFS_PERMISSION_DEFAULT = 0770;
+
+  public static final String HDFS_PATH_PREFIXES_KEY = CONFIG_PREFIX + 
+      "hdfs-path-prefixes";
+  public static final String[] HDFS_PATH_PREFIXES_DEFAULT = new String[0];
+
+  public static final String CACHE_REFRESH_INTERVAL_KEY = CONFIG_PREFIX + 
+      "cache-refresh-interval.ms";
+  public static final int CACHE_REFRESH_INTERVAL_DEFAULT = 500;
+
+  public static final String CACHE_STALE_THRESHOLD_KEY = CONFIG_PREFIX + 
+      "cache-stale-threshold.ms";
+  public static final int CACHE_STALE_THRESHOLD_DEFAULT = 60 * 1000;
+
+  public static final String CACHE_REFRESH_RETRY_WAIT_KEY = CONFIG_PREFIX +
+      "cache-refresh-retry-wait.ms";
+  public static final int CACHE_REFRESH_RETRY_WAIT_DEFAULT = 30 * 1000;
+
+  public static final String INCLUDE_HDFS_AUTHZ_AS_ACL_KEY = CONFIG_PREFIX + 
+      "include-hdfs-authz-as-acl";
+  public static final boolean INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT = true;
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
new file mode 100644
index 0000000..59f4f5e
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationInfo.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.sentry.hdfs.SentryServiceClient.SentryAuthzUpdate;
+import org.apache.sentry.hdfs.Updateable.Update;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Periodically refreshed cache of the Sentry authorization state (managed
+ * paths and object permissions) used by {@link SentryAuthorizationProvider}.
+ * A single daemon thread pulls updates from the Sentry service at a fixed
+ * delay; readers query the cached state under a read lock, and FULL-image
+ * swaps happen under the write lock.
+ */
+public class SentryAuthorizationInfo implements Runnable {
+  private static Logger LOG =
+      LoggerFactory.getLogger(SentryAuthorizationInfo.class);
+
+  private SentryUpdater updater;
+  private volatile UpdateableAuthzPaths authzPaths;
+  private volatile UpdateableAuthzPermissions authzPermissions;
+
+  private int refreshIntervalMillisec;
+  private int staleThresholdMillisec;
+  private int retryWaitMillisec;
+  private ScheduledExecutorService executor;
+  // Timestamp of the last successful pull; 0 until the first success, so the
+  // cache reports stale until it has been populated at least once.
+  private volatile long lastUpdate;
+  private volatile long waitUntil;
+  private volatile long lastStaleReport;
+  // We don't need a re-entrant lock, but we do need a ReadWriteLock.
+  // Unfortunately, ReentrantReadWriteLock is the only available
+  // concrete implementation of a ReadWriteLock.
+  private final ReadWriteLock lock = new ReentrantReadWriteLock();
+
+  @VisibleForTesting
+  SentryAuthorizationInfo() {}
+
+  public SentryAuthorizationInfo(Configuration conf) throws Exception {
+    String[] pathPrefixes = conf.getTrimmedStrings(
+        SentryAuthorizationConstants.HDFS_PATH_PREFIXES_KEY, 
+        SentryAuthorizationConstants.HDFS_PATH_PREFIXES_DEFAULT);
+    if (pathPrefixes.length == 0) {
+      // With no prefixes configured the cache stays uninitialized (authzPaths
+      // == null) and start()/stop() become no-ops.
+      LOG.warn("There are no HDFS path prefixes configured in [{}], "
+          + "Sentry authorization won't be enforced on any HDFS location",
+          SentryAuthorizationConstants.HDFS_PATH_PREFIXES_KEY);
+    } else {
+      refreshIntervalMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_REFRESH_INTERVAL_KEY,
+          SentryAuthorizationConstants.CACHE_REFRESH_INTERVAL_DEFAULT);
+      staleThresholdMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_STALE_THRESHOLD_KEY,
+          SentryAuthorizationConstants.CACHE_STALE_THRESHOLD_DEFAULT);
+      retryWaitMillisec = conf.getInt(
+          SentryAuthorizationConstants.CACHE_REFRESH_RETRY_WAIT_KEY,
+          SentryAuthorizationConstants.CACHE_REFRESH_RETRY_WAIT_DEFAULT);
+
+      LOG.debug("Sentry authorization will be enforced in the following HDFS " +
+          "locations: [{}]", StringUtils.arrayToString(pathPrefixes));
+      LOG.debug("Refresh interval [{}]ms, retry wait [{}]ms, stale threshold " +
+              "[{}]ms", new Object[] 
+          {refreshIntervalMillisec, retryWaitMillisec, staleThresholdMillisec});
+
+      authzPaths = new UpdateableAuthzPaths(pathPrefixes);
+      authzPermissions = new UpdateableAuthzPermissions();
+      waitUntil = System.currentTimeMillis();
+      lastStaleReport = 0;
+      updater = new SentryUpdater(conf, this);
+    }
+  }
+
+  UpdateableAuthzPaths getAuthzPaths() {
+    return authzPaths;
+  }
+
+  UpdateableAuthzPermissions getAuthzPermissions() {
+    return authzPermissions;
+  }
+
+  /**
+   * Pulls pending updates from Sentry and applies them. A FULL image update
+   * yields a brand new instance; the reference swap is done under the write
+   * lock so readers never see a half-applied state.
+   */
+  private void update() {
+    SentryAuthzUpdate updates = updater.getUpdates();
+    UpdateableAuthzPaths newAuthzPaths = processUpdates(
+        updates.getPathUpdates(), authzPaths);
+    UpdateableAuthzPermissions newAuthzPerms = processUpdates(
+        updates.getPermUpdates(), authzPermissions);
+    // If there were any FULL updates the returned instance would be
+    // different
+    if ((newAuthzPaths != authzPaths)||(newAuthzPerms != authzPermissions)) {
+      lock.writeLock().lock();
+      try {
+        authzPaths = newAuthzPaths;
+        LOG.warn("##### FULL Updated paths seq Num [" + authzPaths.getLastUpdatedSeqNum() + "]");
+        authzPermissions = newAuthzPerms;
+        LOG.warn("##### FULL Updated perms seq Num [" + authzPermissions.getLastUpdatedSeqNum() + "]");
+      } finally {
+        lock.writeLock().unlock();
+      }
+    }
+
+  }
+
+  /**
+   * Applies a list of updates to an updateable. A FULL image (if any) is
+   * always the first element and replaces the instance; the remaining
+   * elements are partial updates applied in place.
+   */
+  @SuppressWarnings("unchecked")
+  private <K extends Update, V extends Updateable<K>> V processUpdates(List<K> updates,
+      V updateable) {
+    // In a list of Updates, if there is a full Update, it will be the first
+    // one in the List.. all the remaining will be partial updates
+    if (updates.size() > 0) {
+      if (updates.get(0).hasFullImage()) {
+        updateable = (V) updateable.updateFull(updates.remove(0));
+      }
+      // Any more elements ?
+      if (!updates.isEmpty()) {
+        updateable.updatePartial(updates, lock);
+      }
+    }
+    return updateable;
+  }
+
+  /**
+   * Scheduled refresh task. On failure it pushes {@link #waitUntil} forward
+   * so the next scheduled run sleeps for the retry interval before pulling.
+   */
+  public void run() {
+    try {
+      // In case of a previous update failure, sleep for the retry wait
+      // interval. We can do this because we are using a single threaded
+      // executor and scheduling the runs with fixed delay.
+      long currTime = System.currentTimeMillis();
+      if (waitUntil > currTime) {
+        Thread.sleep(waitUntil - currTime);
+      }
+      update();
+      // we reset lastUpdate only on successful pulling
+      lastUpdate = System.currentTimeMillis();
+      waitUntil = lastUpdate;
+    } catch (InterruptedException ex) {
+      // Restore the interrupt status so an executor shutdown is honored.
+      Thread.currentThread().interrupt();
+    } catch (Exception ex) {
+      // SLF4J renders the stack trace when the Throwable is the last argument.
+      LOG.warn("Failed to update, will retry in [{}]ms, error: ",
+          retryWaitMillisec, ex);
+      waitUntil = System.currentTimeMillis() + retryWaitMillisec;
+    }
+  }
+
+  /** Performs an initial pull and starts the periodic refresher thread. */
+  public void start() {
+    if (authzPaths != null) {
+      try {
+        update();
+      } catch (Exception ex) {
+        LOG.warn("Failed to do initial update, will retry in [{}]ms, error: ",
+            retryWaitMillisec, ex);
+        waitUntil = System.currentTimeMillis() + retryWaitMillisec;
+      }
+      executor = Executors.newSingleThreadScheduledExecutor(
+          new ThreadFactory() {
+            @Override
+            public Thread newThread(Runnable r) {
+              Thread t = new Thread(r, SentryAuthorizationInfo.class.getName() +
+                  "-refresher");
+              t.setDaemon(true);
+              return t;
+            }
+          }
+      );
+      executor.scheduleWithFixedDelay(this, refreshIntervalMillisec, 
+          refreshIntervalMillisec, TimeUnit.MILLISECONDS);
+    }
+  }
+
+  /** Stops the refresher thread; safe to call even if start() never ran. */
+  public void stop() {
+    if (authzPaths != null && executor != null) {
+      executor.shutdownNow();
+    }
+  }
+
+  /**
+   * Returns true if the cached state has not been successfully refreshed
+   * within the stale threshold. Warns at most once every 30s while stale.
+   */
+  public boolean isStale() {
+    long now = System.currentTimeMillis();
+    boolean stale = now - lastUpdate > staleThresholdMillisec;
+    if (stale && now - lastStaleReport > 30 * 1000) {
+      LOG.warn("Authorization information has been stale for [{}]s", 
+          (now - lastUpdate) / 1000);
+      lastStaleReport = now;
+    }
+    return stale;
+  }
+
+  /** True if the path is under one of the configured Sentry prefixes. */
+  public boolean isManaged(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      return authzPaths.isUnderPrefix(pathElements);
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /** True if the path maps to a known Sentry authorizable object. */
+  public boolean doesBelongToAuthzObject(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      return authzPaths.findAuthzObject(pathElements) != null;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+  /**
+   * Returns the ACL entries for the authorizable object the path maps to,
+   * or an empty list if the path has no associated object.
+   */
+  @SuppressWarnings("unchecked")
+  public List<AclEntry> getAclEntries(String[] pathElements) {
+    lock.readLock().lock();
+    try {
+      String authzObj = authzPaths.findAuthzObject(pathElements);
+      return (authzObj != null) ? authzPermissions.getAcls(authzObj) 
+          : Collections.EMPTY_LIST;
+    } finally {
+      lock.readLock().unlock();
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
new file mode 100644
index 0000000..2375d1b
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
@@ -0,0 +1,370 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permission and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.server.namenode.AclFeature;
+import org.apache.hadoop.hdfs.server.namenode.AuthorizationProvider;
+import org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider;
+import org.apache.hadoop.security.AccessControlException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+
+public class SentryAuthorizationProvider 
+    implements AuthorizationProvider, Configurable {
+  
+  static class SentryAclFeature extends AclFeature {
+    public SentryAclFeature(ImmutableList<AclEntry> entries) {
+      super(entries);
+    }
+  }
+
+  private static Logger LOG = 
+      LoggerFactory.getLogger(SentryAuthorizationProvider.class);
+
+  private boolean started;
+  private Configuration conf;
+  private AuthorizationProvider defaultAuthzProvider;
+  private String user;
+  private String group;
+  private FsPermission permission;
+  private boolean originalAuthzAsAcl;
+  private SentryAuthorizationInfo authzInfo;
+
+  public SentryAuthorizationProvider() {
+    this(null);
+  }
+
+  @VisibleForTesting
+  SentryAuthorizationProvider(SentryAuthorizationInfo authzInfo) {
+    this.authzInfo = authzInfo;
+  }
+  
+  @Override
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
+
+  @Override
+  public Configuration getConf() {
+    return conf;
+  }
+
+  @Override
+  public synchronized void start() {
+    if (started) {
+      throw new IllegalStateException("Provider already started");
+    }
+    started = true;
+    try {
+      if (!conf.getBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, false)) {
+        throw new RuntimeException("HDFS ACLs must be enabled");
+      }
+
+      defaultAuthzProvider = new DefaultAuthorizationProvider();
+      defaultAuthzProvider.start();
+      // Configuration is read from hdfs-sentry.xml and NN configuration, in
+      // that order of precedence.
+      Configuration conf = new Configuration(this.conf);
+      conf.addResource(SentryAuthorizationConstants.CONFIG_FILE);
+      user = conf.get(SentryAuthorizationConstants.HDFS_USER_KEY,
+          SentryAuthorizationConstants.HDFS_USER_DEFAULT);
+      group = conf.get(SentryAuthorizationConstants.HDFS_GROUP_KEY,
+          SentryAuthorizationConstants.HDFS_GROUP_DEFAULT);
+      permission = FsPermission.createImmutable(
+          (short) conf.getLong(SentryAuthorizationConstants.HDFS_PERMISSION_KEY,
+              SentryAuthorizationConstants.HDFS_PERMISSION_DEFAULT)
+      );
+      originalAuthzAsAcl = conf.getBoolean(
+          SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_KEY,
+          SentryAuthorizationConstants.INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT);
+
+      LOG.info("Starting");
+      LOG.info("Config: hdfs-user[{}] hdfs-group[{}] hdfs-permission[{}] " +
+          "include-hdfs-authz-as-acl[{}]", new Object[]
+          {user, group, permission, originalAuthzAsAcl});
+
+      if (authzInfo == null) {
+        authzInfo = new SentryAuthorizationInfo(conf);
+      }
+      authzInfo.start();
+    } catch (Exception ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  @Override
+  public synchronized void stop() {
+    LOG.debug("Stopping");
+    authzInfo.stop();
+    defaultAuthzProvider.stop();
+    defaultAuthzProvider = null;
+  }
+
+  @Override
+  public void setSnaphottableDirs(Map<INodeAuthorizationInfo, Integer>
+      snapshotableDirs) {
+    defaultAuthzProvider.setSnaphottableDirs(snapshotableDirs);
+  }
+
+  @Override
+  public void addSnapshottable(INodeAuthorizationInfo dir) {
+    defaultAuthzProvider.addSnapshottable(dir);
+  }
+
+  @Override
+  public void removeSnapshottable(INodeAuthorizationInfo dir) {
+    defaultAuthzProvider.removeSnapshottable(dir);
+  }
+
+  @Override
+  public void createSnapshot(INodeAuthorizationInfo dir, int snapshotId)
+      throws IOException{
+    defaultAuthzProvider.createSnapshot(dir, snapshotId);
+  }
+
+  @Override
+  public void removeSnapshot(INodeAuthorizationInfo dir, int snapshotId)
+      throws IOException {
+    defaultAuthzProvider.removeSnapshot(dir, snapshotId);
+  }
+
+  @Override
+  public void checkPermission(String user, Set<String> groups,
+      INodeAuthorizationInfo[] inodes, int snapshotId,
+      boolean doCheckOwner, FsAction ancestorAccess, FsAction parentAccess,
+      FsAction access, FsAction subAccess, boolean ignoreEmptyDir)
+      throws AccessControlException, UnresolvedLinkException {
+    defaultAuthzProvider.checkPermission(user, groups, inodes, snapshotId,
+        doCheckOwner, ancestorAccess, parentAccess, access, subAccess,
+        ignoreEmptyDir);
+  }
+
+  private static final String[] EMPTY_STRING_ARRAY = new String[0];
+  
+  private String[] getPathElements(INodeAuthorizationInfo node) {
+    return getPathElements(node, 0);
+  }
+
+  private String[] getPathElements(INodeAuthorizationInfo node, int idx) {
+    String[] paths;
+    INodeAuthorizationInfo parent = node.getParent();
+    if (parent == null) {
+      paths = (idx > 0) ? new String[idx] : EMPTY_STRING_ARRAY;
+    } else {
+      paths = getPathElements(parent, idx + 1);
+      paths[paths.length - 1 - idx] = node.getLocalName();
+    }
+    return paths;
+  }
+
+  @Override
+  public void setUser(INodeAuthorizationInfo node, String user) {
+    defaultAuthzProvider.setUser(node, user);
+  }
+
+  @Override
+  public String getUser(INodeAuthorizationInfo node, int snapshotId) {
+    String user;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      user = defaultAuthzProvider.getUser(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          user = this.user;
+        } else {
+          user = defaultAuthzProvider.getUser(node, snapshotId);
+        }
+      } else {
+        user = this.user;
+      }
+    }
+    return user;
+  }
+
+  @Override
+  public void setGroup(INodeAuthorizationInfo node, String group) {
+    defaultAuthzProvider.setGroup(node, group);
+  }
+
+  @Override
+  public String getGroup(INodeAuthorizationInfo node, int snapshotId) {
+    String group;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      group = defaultAuthzProvider.getGroup(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          group = this.group;
+        } else {
+          group = defaultAuthzProvider.getGroup(node, snapshotId);
+        }
+      } else {
+        group = this.group;
+      }
+    }
+    return group;
+  }
+
+  @Override
+  public void setPermission(INodeAuthorizationInfo node,
+      FsPermission permission) {
+    defaultAuthzProvider.setPermission(node, permission);
+  }
+
+  @Override
+  public FsPermission getFsPermission(
+      INodeAuthorizationInfo node, int snapshotId) {
+    FsPermission permission;
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
+    } else {
+      if (!authzInfo.isStale()) {
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          permission = this.permission;
+        } else {
+          permission = defaultAuthzProvider.getFsPermission(node, snapshotId);
+        }
+      } else {
+        permission = this.permission;
+      }
+    }
+    return permission;
+  }
+
+  private List<AclEntry> createAclEntries(String user, String group,
+      FsPermission permission) {
+    List<AclEntry> list = new ArrayList<AclEntry>();
+    AclEntry.Builder builder = new AclEntry.Builder();
+    FsPermission fsPerm = new FsPermission(permission);
+    builder.setName(user);
+    builder.setType(AclEntryType.USER);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getUserAction());
+    list.add(builder.build());
+    builder.setName(group);
+    builder.setType(AclEntryType.GROUP);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getGroupAction());
+    list.add(builder.build());
+    builder.setName(null);
+    builder.setType(AclEntryType.OTHER);
+    builder.setScope(AclEntryScope.ACCESS);
+    builder.setPermission(fsPerm.getOtherAction());
+    list.add(builder.build());
+    return list;
+  }
+
+  @Override
+  public AclFeature getAclFeature(INodeAuthorizationInfo node, int snapshotId) {
+    AclFeature f = null;
+    String[] pathElements = getPathElements(node);
+    String p = Arrays.toString(pathElements);
+    boolean isManaged = false;
+    boolean isStale = false;
+    boolean hasAuthzObj = false;
+    if (!authzInfo.isManaged(pathElements)) {
+      isManaged = false;
+      f = defaultAuthzProvider.getAclFeature(node, snapshotId);
+    } else {
+      isManaged = true;
+      List<AclEntry> list = new ArrayList<AclEntry>();
+      if (originalAuthzAsAcl) {
+        String user = defaultAuthzProvider.getUser(node, snapshotId);
+        String group = defaultAuthzProvider.getGroup(node, snapshotId);
+        INodeAuthorizationInfo pNode = node.getParent();
+        while  (group == null || pNode != null) {
+          group = defaultAuthzProvider.getGroup(pNode, snapshotId);
+          pNode = pNode.getParent();
+        }
+        FsPermission perm = defaultAuthzProvider.getFsPermission(node, snapshotId);
+        list.addAll(createAclEntries(user, group, perm));
+      }
+      if (!authzInfo.isStale()) { 
+        isStale = false;
+        if (authzInfo.doesBelongToAuthzObject(pathElements)) {
+          hasAuthzObj = true;
+          list.addAll(authzInfo.getAclEntries(pathElements));
+          f = new SentryAclFeature(ImmutableList.copyOf(list));
+        } else {
+          hasAuthzObj = false;
+          f = defaultAuthzProvider.getAclFeature(node, snapshotId);
+        }
+      } else {
+        isStale = true;
+        f = new SentryAclFeature(ImmutableList.copyOf(list));
+      }
+    }
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("### getAclEntry [" + (p == null ? "null" : p) + "] : ["
+          + "isManaged=" + isManaged
+          + ",isStale=" + isStale
+          + ",hasAuthzObj=" + hasAuthzObj
+          + ",origAtuhzAsAcl=" + originalAuthzAsAcl + "]"
+          + "[" + (f == null ? "null" : f.getEntries()) + "]");
+    }
+    return f;
+  }
+
+  @Override
+  public void removeAclFeature(INodeAuthorizationInfo node) {
+    AclFeature aclFeature = node.getAclFeature(CURRENT_STATE_ID);
+    if (aclFeature.getClass() != SentryAclFeature.class) {
+      defaultAuthzProvider.removeAclFeature(node);
+    }
+  }
+
+  @Override
+  public void addAclFeature(INodeAuthorizationInfo node, AclFeature f) {
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      defaultAuthzProvider.addAclFeature(node, f);
+    }
+  }
+
+  @Override 
+  public boolean doesAllowChanges(INodeAuthorizationInfo node) {
+    String[] pathElements = getPathElements(node);
+    if (!authzInfo.isManaged(pathElements)) {
+      return defaultAuthzProvider.doesAllowChanges(node);
+    }
+    return !authzInfo.doesBelongToAuthzObject(getPathElements(node));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
new file mode 100644
index 0000000..7461f89
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+
+import com.google.common.collect.Lists;
+
+/**
+ * In-memory view of Sentry privileges and roles, and translation of them
+ * into HDFS ACL entries. Privileges map an authorizable object to per-role
+ * FsActions; roles map a role name to its member groups.
+ *
+ * NOTE(review): the backing HashMaps are not synchronized here; callers are
+ * presumed to serialize access (e.g. via UpdateableAuthzPermissions) —
+ * confirm before using this class from multiple threads directly.
+ */
+public class SentryPermissions implements AuthzPermissions {
+  
+  /** Per-object privilege map: role name -> granted FsAction. */
+  public static class PrivilegeInfo {
+    private final String authzObj;
+    private final Map<String, FsAction> roleToPermission = new HashMap<String, FsAction>();
+    public PrivilegeInfo(String authzObj) {
+      this.authzObj = authzObj;
+    }
+    public PrivilegeInfo setPermission(String role, FsAction perm) {
+      roleToPermission.put(role, perm);
+      return this;
+    }
+    public PrivilegeInfo removePermission(String role) {
+      roleToPermission.remove(role);
+      return this;
+    }
+    public FsAction getPermission(String role) {
+      return roleToPermission.get(role);
+    }
+    public Map<String, FsAction> getAllPermissions() {
+      return roleToPermission;
+    }
+    public String getAuthzObj() {
+      return authzObj;
+    }
+  }
+
+  /** Role membership: role name -> set of member groups. */
+  public static class RoleInfo {
+    private final String role;
+    private final Set<String> groups = new HashSet<String>();
+    public RoleInfo(String role) {
+      this.role = role;
+    }
+    public RoleInfo addGroup(String group) {
+      groups.add(group);
+      return this;
+    }
+    public RoleInfo delGroup(String group) {
+      groups.remove(group);
+      return this;
+    }
+    public String getRole() {
+      return role;
+    }
+    public Set<String> getAllGroups() {
+      return groups;
+    }
+  }
+
+  private final Map<String, PrivilegeInfo> privileges = new HashMap<String, PrivilegeInfo>();
+  private final Map<String, RoleInfo> roles = new HashMap<String, RoleInfo>();
+
+  /**
+   * Computes the group ACL entries for an authorizable object by expanding
+   * each role's permission to the role's member groups, OR-ing permissions
+   * when a group gets the object through several roles.
+   */
+  @Override
+  public List<AclEntry> getAcls(String authzObj) {
+    PrivilegeInfo privilegeInfo = privileges.get(authzObj);
+    Map<String, FsAction> groupPerms = new HashMap<String, FsAction>();
+    if (privilegeInfo != null) {
+      for (Map.Entry<String, FsAction> privs : privilegeInfo
+          .getAllPermissions().entrySet()) {
+        constructAclEntry(privs.getKey(), privs.getValue(), groupPerms);
+      }
+    }
+    List<AclEntry> retList = new LinkedList<AclEntry>();
+    for (Map.Entry<String, FsAction> groupPerm : groupPerms.entrySet()) {
+      AclEntry.Builder builder = new AclEntry.Builder();
+      builder.setName(groupPerm.getKey());
+      builder.setType(AclEntryType.GROUP);
+      builder.setScope(AclEntryScope.ACCESS);
+      FsAction action = groupPerm.getValue();
+      // Any read or write grant also needs EXECUTE so directories can be
+      // traversed.
+      if ((action == FsAction.READ) || (action == FsAction.WRITE)
+          || (action == FsAction.READ_WRITE)) {
+        action = action.or(FsAction.EXECUTE);
+      }
+      builder.setPermission(action);
+      retList.add(builder.build());
+    }
+    return retList;
+  }
+
+  // Folds one role's permission into the per-group accumulator, OR-ing with
+  // whatever the group already has. Uses the public accessor rather than
+  // reaching into RoleInfo's private field.
+  private void constructAclEntry(String role, FsAction permission,
+      Map<String, FsAction> groupPerms) {
+    RoleInfo roleInfo = roles.get(role);
+    if (roleInfo != null) {
+      for (String group : roleInfo.getAllGroups()) {
+        FsAction fsAction = groupPerms.get(group);
+        if (fsAction == null) {
+          fsAction = FsAction.NONE;
+        }
+        groupPerms.put(group, fsAction.or(permission));
+      }
+    }
+  }
+
+  public PrivilegeInfo getPrivilegeInfo(String authzObj) {
+    return privileges.get(authzObj);
+  }
+
+  Collection<PrivilegeInfo> getAllPrivileges() {
+    return privileges.values();
+  }
+
+  Collection<RoleInfo> getAllRoles() {
+    return roles.values();
+  }
+
+  public void delPrivilegeInfo(String authzObj) {
+    privileges.remove(authzObj);
+  }
+
+  public void addPrivilegeInfo(PrivilegeInfo privilegeInfo) {
+    privileges.put(privilegeInfo.getAuthzObj(), privilegeInfo);
+  }
+
+  public RoleInfo getRoleInfo(String role) {
+    return roles.get(role);
+  }
+
+  public void delRoleInfo(String role) {
+    roles.remove(role);
+  }
+
+  public void addRoleInfo(RoleInfo roleInfo) {
+    roles.put(roleInfo.getRole(), roleInfo);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryServiceClient.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryServiceClient.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryServiceClient.java
new file mode 100644
index 0000000..97da9aa
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryServiceClient.java
@@ -0,0 +1,200 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.callback.CallbackHandler;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyService;
+import org.apache.sentry.provider.db.service.thrift.SentryPolicyService.Client;
+import org.apache.sentry.provider.db.service.thrift.TAuthzUpdateResponse;
+import org.apache.sentry.provider.db.service.thrift.TPathsUpdate;
+import org.apache.sentry.provider.db.service.thrift.TPermissionsUpdate;
+import org.apache.sentry.service.thrift.ServiceConstants.ClientConfig;
+import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TMultiplexedProtocol;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+public class SentryServiceClient {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SentryServiceClient.class);
+  
+  public static class SentryAuthzUpdate {
+
+    private final List<PermissionsUpdate> permUpdates;
+    private final List<PathsUpdate> pathUpdates;
+
+    public SentryAuthzUpdate(List<PermissionsUpdate> permUpdates, List<PathsUpdate> pathUpdates) {
+      this.permUpdates = permUpdates;
+      this.pathUpdates = pathUpdates;
+    }
+
+    public List<PermissionsUpdate> getPermUpdates() {
+      return permUpdates;
+    }
+
+    public List<PathsUpdate> getPathUpdates() {
+      return pathUpdates;
+    }
+  }
+  
+  /**
+   * This transport wraps the Sasl transports to set up the right UGI context for open().
+   */
+  public static class UgiSaslClientTransport extends TSaslClientTransport {
+    protected UserGroupInformation ugi = null;
+
+    public UgiSaslClientTransport(String mechanism, String authorizationId,
+        String protocol, String serverName, Map<String, String> props,
+        CallbackHandler cbh, TTransport transport, boolean wrapUgi)
+        throws IOException {
+      super(mechanism, authorizationId, protocol, serverName, props, cbh,
+          transport);
+      if (wrapUgi) {
+        ugi = UserGroupInformation.getLoginUser();
+      }
+    }
+
+    // open the SASL transport with using the current UserGroupInformation
+    // This is needed to get the current login context stored
+    @Override
+    public void open() throws TTransportException {
+      if (ugi == null) {
+        baseOpen();
+      } else {
+        try {
+          ugi.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws TTransportException {
+              baseOpen();
+              return null;
+            }
+          });
+        } catch (IOException e) {
+          throw new TTransportException("Failed to open SASL transport", e);
+        } catch (InterruptedException e) {
+          throw new TTransportException(
+              "Interrupted while opening underlying transport", e);
+        }
+      }
+    }
+
+    private void baseOpen() throws TTransportException {
+      super.open();
+    }
+  }
+
+  private final Configuration conf;
+  private final InetSocketAddress serverAddress;
+  private final int connectionTimeout;
+  private boolean kerberos;
+  private TTransport transport;
+
+  private String[] serverPrincipalParts;
+  private Client client;
+  
+  public SentryServiceClient(Configuration conf) throws IOException {
+    this.conf = conf;
+    Preconditions.checkNotNull(this.conf, "Configuration object cannot be null");
+    this.serverAddress = NetUtils.createSocketAddr(Preconditions.checkNotNull(
+                           conf.get(ClientConfig.SERVER_RPC_ADDRESS), "Config key "
+                           + ClientConfig.SERVER_RPC_ADDRESS + " is required"), conf.getInt(
+                           ClientConfig.SERVER_RPC_PORT, ClientConfig.SERVER_RPC_PORT_DEFAULT));
+    this.connectionTimeout = conf.getInt(ClientConfig.SERVER_RPC_CONN_TIMEOUT,
+                                         ClientConfig.SERVER_RPC_CONN_TIMEOUT_DEFAULT);
+    kerberos = ServerConfig.SECURITY_MODE_KERBEROS.equalsIgnoreCase(
+        conf.get(ServerConfig.SECURITY_MODE, ServerConfig.SECURITY_MODE_KERBEROS).trim());
+    transport = new TSocket(serverAddress.getHostName(),
+        serverAddress.getPort(), connectionTimeout);
+    if (kerberos) {
+      String serverPrincipal = Preconditions.checkNotNull(conf.get(ServerConfig.PRINCIPAL), ServerConfig.PRINCIPAL + " is required");
+
+      // Resolve server host in the same way as we are doing on server side
+      serverPrincipal = SecurityUtil.getServerPrincipal(serverPrincipal, serverAddress.getAddress());
+      LOGGER.info("Using server kerberos principal: " + serverPrincipal);
+
+      serverPrincipalParts = SaslRpcServer.splitKerberosName(serverPrincipal);
+      Preconditions.checkArgument(serverPrincipalParts.length == 3,
+           "Kerberos principal should have 3 parts: " + serverPrincipal);
+      boolean wrapUgi = "true".equalsIgnoreCase(conf
+          .get(ServerConfig.SECURITY_USE_UGI_TRANSPORT, "true"));
+      transport = new UgiSaslClientTransport(AuthMethod.KERBEROS.getMechanismName(),
+          null, serverPrincipalParts[0], serverPrincipalParts[1],
+          ClientConfig.SASL_PROPERTIES, null, transport, wrapUgi);
+    } else {
+      serverPrincipalParts = null;
+    }
+    try {
+      transport.open();
+    } catch (TTransportException e) {
+      throw new IOException("Transport exception while opening transport: " + e.getMessage(), e);
+    }
+    LOGGER.info("Successfully opened transport: " + transport + " to " + serverAddress);
+    TMultiplexedProtocol protocol = new TMultiplexedProtocol(
+      new TCompactProtocol(transport),
+      "SentryPolicyService");
+    client = new SentryPolicyService.Client(protocol);
+    LOGGER.info("Successfully created client");
+  }
+
+  public synchronized SentryAuthzUpdate getAllUpdatesFrom(long permSeqNum, long pathSeqNum)
+      throws IOException {
+    SentryAuthzUpdate retVal = new SentryAuthzUpdate(new LinkedList<PermissionsUpdate>(), new LinkedList<PathsUpdate>());
+    try {
+      TAuthzUpdateResponse sentryUpdates = client.get_all_authz_updates_from(permSeqNum, pathSeqNum);
+      if (sentryUpdates.getAuthzPathUpdate() != null) {
+        for (TPathsUpdate pathsUpdate : sentryUpdates.getAuthzPathUpdate()) {
+          retVal.getPathUpdates().add(new PathsUpdate(pathsUpdate));
+        }
+      }
+      if (sentryUpdates.getAuthzPermUpdate() != null) {
+        for (TPermissionsUpdate permsUpdate : sentryUpdates.getAuthzPermUpdate()) {
+          retVal.getPermUpdates().add(new PermissionsUpdate(permsUpdate));
+        }
+      }
+    } catch (Exception e) {
+      throw new IOException("Thrift Exception occurred !!", e);
+    }
+    return retVal;
+  }
+
+  public void close() {
+    if (transport != null) {
+      transport.close();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
new file mode 100644
index 0000000..bc46651
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/SentryUpdater.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.sentry.hdfs.SentryServiceClient.SentryAuthzUpdate;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Lazily connects to the Sentry service and polls it for authorization
+ * updates newer than what the given {@link SentryAuthorizationInfo} has
+ * already applied. All failures are logged and surfaced as {@code null}
+ * results so the caller's refresh loop can simply retry later.
+ */
+public class SentryUpdater {
+
+  private SentryServiceClient sentryClient;
+  private final Configuration conf;
+  private final SentryAuthorizationInfo authzInfo;
+
+  private static Logger LOG = LoggerFactory.getLogger(SentryUpdater.class);
+
+  public SentryUpdater(Configuration conf, SentryAuthorizationInfo authzInfo) throws Exception {
+    this.conf = conf;
+    this.authzInfo = authzInfo;
+  }
+
+  /**
+   * Returns the next batch of permission/path updates, or {@code null} when
+   * the service cannot be reached or the fetch fails.
+   */
+  public SentryAuthzUpdate getUpdates() {
+    if (sentryClient == null) {
+      try {
+        sentryClient = new SentryServiceClient(conf);
+      } catch (Exception e) {
+        // Log the full exception; the message alone rarely explains a
+        // connection failure.
+        LOG.error("Error connecting to Sentry !!", e);
+        return null;
+      }
+    }
+    try {
+      SentryAuthzUpdate sentryUpdates = sentryClient.getAllUpdatesFrom(
+          authzInfo.getAuthzPermissions().getLastUpdatedSeqNum() + 1,
+          authzInfo.getAuthzPaths().getLastUpdatedSeqNum() + 1);
+      return sentryUpdates;
+    } catch (Exception e)  {
+      // Close the (possibly broken) connection before dropping it so the
+      // underlying transport socket is not leaked; the next call reconnects.
+      sentryClient.close();
+      sentryClient = null;
+      LOG.error("Error receiving updates from Sentry !!", e);
+      return null;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
new file mode 100644
index 0000000..f9e1cf4
--- /dev/null
+++ b/sentry-hdfs-int/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.hdfs.SentryPermissions.PrivilegeInfo;
+import org.apache.sentry.hdfs.SentryPermissions.RoleInfo;
+import org.apache.sentry.provider.db.service.thrift.TPrivilegeChanges;
+import org.apache.sentry.provider.db.service.thrift.TRoleChanges;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UpdateableAuthzPermissions implements AuthzPermissions, Updateable<PermissionsUpdate> {
+  private volatile SentryPermissions perms = new SentryPermissions();
+  private final AtomicLong seqNum = new AtomicLong(0);
+  
+  private static Logger LOG = LoggerFactory.getLogger(UpdateableAuthzPermissions.class);
+
+  @Override
+  public List<AclEntry> getAcls(String authzObj) {
+    return perms.getAcls(authzObj);
+  }
+
+  @Override
+  public UpdateableAuthzPermissions updateFull(PermissionsUpdate update) {
+    UpdateableAuthzPermissions other = new UpdateableAuthzPermissions();
+    other.applyPartialUpdate(update);
+    other.seqNum.set(update.getSeqNum());
+    return other;
+  }
+
+  @Override
+  public void updatePartial(Iterable<PermissionsUpdate> updates, ReadWriteLock lock) {
+    lock.writeLock().lock();
+    try {
+      int counter = 0;
+      for (PermissionsUpdate update : updates) {
+        applyPartialUpdate(update);
+        if (++counter > 99) {
+          counter = 0;
+          lock.writeLock().unlock();
+          lock.writeLock().lock();
+        }
+        seqNum.set(update.getSeqNum());
+        LOG.warn("##### Updated perms seq Num [" + seqNum.get() + "]");
+      }
+    } finally {
+      lock.writeLock().unlock();
+    }
+  }
+  
+
+  private void applyPartialUpdate(PermissionsUpdate update) {
+    applyPrivilegeUpdates(update);
+    applyRoleUpdates(update);
+  }
+
+  private void applyRoleUpdates(PermissionsUpdate update) {
+    for (TRoleChanges rUpdate : update.getRoleUpdates()) {
+      if (rUpdate.getRole().equals(PermissionsUpdate.ALL_ROLES)) {
+        // Request to remove group from all roles
+        String groupToRemove = rUpdate.getDelGroups().iterator().next();
+        for (RoleInfo rInfo : perms.getAllRoles()) {
+          rInfo.delGroup(groupToRemove);
+        }
+      }
+      RoleInfo rInfo = perms.getRoleInfo(rUpdate.getRole());
+      for (String group : rUpdate.getAddGroups()) {
+        if (rInfo == null) {
+          rInfo = new RoleInfo(rUpdate.getRole());
+        }
+        rInfo.addGroup(group);
+      }
+      if (rInfo != null) {
+        perms.addRoleInfo(rInfo);
+        for (String group : rUpdate.getDelGroups()) {
+          if (group.equals(PermissionsUpdate.ALL_GROUPS)) {
+            perms.delRoleInfo(rInfo.getRole());
+            break;
+          }
+          // If there are no groups to remove, rUpdate.getDelGroups() will
+          // return empty list and this code will not be reached
+          rInfo.delGroup(group);
+        }
+      }
+    }
+  }
+
+  private void applyPrivilegeUpdates(PermissionsUpdate update) {
+    for (TPrivilegeChanges pUpdate : update.getPrivilegeUpdates()) {
+      if (pUpdate.getAuthzObj().equals(PermissionsUpdate.ALL_PRIVS)) {
+        // Request to remove role from all Privileges
+        String roleToRemove = pUpdate.getDelPrivileges().keySet().iterator()
+            .next();
+        for (PrivilegeInfo pInfo : perms.getAllPrivileges()) {
+          pInfo.removePermission(roleToRemove);
+        }
+      }
+      PrivilegeInfo pInfo = perms.getPrivilegeInfo(pUpdate.getAuthzObj());
+      for (Map.Entry<String, String> aMap : pUpdate.getAddPrivileges().entrySet()) {
+        if (pInfo == null) {
+          pInfo = new PrivilegeInfo(pUpdate.getAuthzObj());
+        }
+        FsAction fsAction = pInfo.getPermission(aMap.getKey());
+        if (fsAction == null) {
+          fsAction = FsAction.getFsAction(aMap.getValue());
+        } else {
+          fsAction = fsAction.or(FsAction.getFsAction(aMap.getValue()));
+        }
+        pInfo.setPermission(aMap.getKey(), fsAction);
+      }
+      if (pInfo != null) {
+        perms.addPrivilegeInfo(pInfo);
+        for (Map.Entry<String, String> dMap : pUpdate.getDelPrivileges().entrySet()) {
+          if (dMap.getKey().equals(PermissionsUpdate.ALL_ROLES)) {
+            // Remove all privileges
+            perms.delPrivilegeInfo(pUpdate.getAuthzObj());
+            break;
+          }
+          FsAction fsAction = pInfo.getPermission(dMap.getKey());
+          if (fsAction != null) {
+            fsAction = fsAction.and(FsAction.getFsAction(dMap.getValue()).not());
+            if (FsAction.NONE == fsAction) {
+              pInfo.removePermission(dMap.getKey());
+            } else {
+              pInfo.setPermission(dMap.getKey(), fsAction);
+            }
+          }
+        }
+      }
+    }
+  }
+
+  @Override
+  public long getLastUpdatedSeqNum() {
+    return seqNum.get();
+  }
+
+  @Override
+  public PermissionsUpdate createFullImageUpdate(long currSeqNum) {
+    PermissionsUpdate retVal = new PermissionsUpdate(currSeqNum, true);
+    for (PrivilegeInfo pInfo : perms.getAllPrivileges()) {
+      TPrivilegeChanges pUpdate = retVal.addPrivilegeUpdate(pInfo.getAuthzObj());
+      for (Map.Entry<String, FsAction> ent : pInfo.getAllPermissions().entrySet()) {
+        pUpdate.putToAddPrivileges(ent.getKey(), ent.getValue().SYMBOL);
+      }
+    }
+    for (RoleInfo rInfo : perms.getAllRoles()) {
+      TRoleChanges rUpdate = retVal.addRoleUpdate(rInfo.getRole());
+      for (String group : rInfo.getAllGroups()) {
+        rUpdate.addToAddGroups(group);
+      }
+    }
+    return retVal;
+  }
+
+  
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
new file mode 100644
index 0000000..2085b52
--- /dev/null
+++ b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/MockSentryAuthorizationProvider.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+/**
+ * Test-only provider that wires {@link SentryAuthorizationProvider} to the
+ * stubbed {@link SentryAuthorizationInfoX}, so tests run without a live
+ * Sentry service.
+ */
+public class MockSentryAuthorizationProvider extends
+    SentryAuthorizationProvider {
+
+  public MockSentryAuthorizationProvider() {
+    super(new SentryAuthorizationInfoX());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/0eb6645e/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
new file mode 100644
index 0000000..7a1539b
--- /dev/null
+++ b/sentry-hdfs-int/src/test/java/org/apache/sentry/hdfs/SentryAuthorizationInfoX.java
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.AclEntry;
+import org.apache.hadoop.fs.permission.AclEntryScope;
+import org.apache.hadoop.fs.permission.AclEntryType;
+import org.apache.hadoop.fs.permission.FsAction;
+
+/**
+ * Stubbed SentryAuthorizationInfo for tests: it never refreshes, never goes
+ * stale, and answers path queries from two hard-coded prefixes.
+ */
+public class SentryAuthorizationInfoX extends SentryAuthorizationInfo {
+
+  public SentryAuthorizationInfoX() {
+    super();
+  }
+
+  @Override
+  public void run() {
+    // no background refresh in tests
+  }
+
+  @Override
+  public void start() {
+    // nothing to start
+  }
+
+  @Override
+  public void stop() {
+    // nothing to stop
+  }
+
+  @Override
+  public boolean isStale() {
+    return false;
+  }
+
+  private static final String[] MANAGED = {"user", "authz"};
+  private static final String[] AUTHZ_OBJ = {"user", "authz", "obj"};
+
+  // True when the path has at least prefix.length elements and every element
+  // of prefix matches the corresponding head element of the path.
+  private boolean hasPrefix(String[] prefix, String[] path) {
+    if (path.length < prefix.length) {
+      return false;
+    }
+    for (int idx = 0; idx < prefix.length; idx++) {
+      if (!prefix[idx].equals(path[idx])) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public boolean isManaged(String[] pathElements) {
+    return hasPrefix(MANAGED, pathElements);
+  }
+
+  @Override
+  public boolean doesBelongToAuthzObject(String[] pathElements) {
+    return hasPrefix(AUTHZ_OBJ, pathElements);
+  }
+
+  @Override
+  public List<AclEntry> getAclEntries(String[] pathElements) {
+    AclEntry.Builder aclBuilder = new AclEntry.Builder();
+    aclBuilder.setType(AclEntryType.USER);
+    aclBuilder.setPermission(FsAction.ALL);
+    aclBuilder.setName("user-authz");
+    aclBuilder.setScope(AclEntryScope.ACCESS);
+    return Arrays.asList(aclBuilder.build());
+  }
+}