Posted to commits@sentry.apache.org by ha...@apache.org on 2016/07/14 22:01:22 UTC

sentry git commit: SENTRY-1403: Move SentryHDFSServiceClient code from hdfs-common into hdfs-service (Hao Hao, Reviewed by: Colin P. McCabe and Anne Yu)

Repository: sentry
Updated Branches:
  refs/heads/sentry-ha-redesign 6f298b0f7 -> a62664153


SENTRY-1403: Move SentryHDFSServiceClient code from hdfs-common into hdfs-service (Hao Hao, Reviewed by: Colin P. McCabe and Anne Yu)

Change-Id: I03169a5bc1127ccb3248b286dc45b669ac2b8605


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/a6266415
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/a6266415
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/a6266415

Branch: refs/heads/sentry-ha-redesign
Commit: a62664153e51256ad38d6aaf9e717e52230d15a0
Parents: 6f298b0
Author: hahao <ha...@cloudera.com>
Authored: Thu Jul 14 15:00:21 2016 -0700
Committer: hahao <ha...@cloudera.com>
Committed: Thu Jul 14 15:00:21 2016 -0700

----------------------------------------------------------------------
 sentry-hdfs/sentry-hdfs-common/pom.xml          |  11 -
 .../sentry/hdfs/SentryHDFSServiceClient.java    |  33 ---
 .../SentryHDFSServiceClientDefaultImpl.java     | 215 -------------------
 .../hdfs/SentryHDFSServiceClientFactory.java    |  36 ----
 .../sentry/hdfs/SentryHdfsServiceException.java |  33 ---
 .../hdfs/SentryHdfsServiceIntegrationBase.java  |  81 -------
 .../sentry/hdfs/TestKrbConnectionTimeout.java   |  51 -----
 sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml |   4 +
 sentry-hdfs/sentry-hdfs-service/pom.xml         |   6 +
 .../sentry/hdfs/SentryHDFSServiceClient.java    |  33 +++
 .../SentryHDFSServiceClientDefaultImpl.java     | 215 +++++++++++++++++++
 .../hdfs/SentryHDFSServiceClientFactory.java    |  36 ++++
 .../sentry/hdfs/SentryHdfsServiceException.java |  33 +++
 .../hdfs/SentryHdfsServiceIntegrationBase.java  |  81 +++++++
 .../sentry/hdfs/TestKrbConnectionTimeout.java   |  51 +++++
 15 files changed, 459 insertions(+), 460 deletions(-)
----------------------------------------------------------------------
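
The client API itself is unchanged by this commit: the classes simply move from sentry-hdfs-common into sentry-hdfs-service, so code that consumes SentryHDFSServiceClient now pulls in sentry-hdfs-service instead (see the pom.xml changes below). A minimal usage sketch follows, assuming a running Sentry HDFS service; it is built only from the interface and factory shown in this diff, and the host, port, and call sequence are illustrative placeholders rather than anything prescribed by the commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.sentry.hdfs.SentryAuthzUpdate;
import org.apache.sentry.hdfs.SentryHDFSServiceClient;
import org.apache.sentry.hdfs.SentryHDFSServiceClientFactory;
import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;

public class HdfsSyncClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder address/port -- point these at a real Sentry HDFS service.
    conf.set(ClientConfig.SERVER_RPC_ADDRESS, "sentry-host.example.com");
    conf.set(ClientConfig.SERVER_RPC_PORT, "8038");
    // Kerberos is the default security mode in SentryHDFSServiceClientDefaultImpl;
    // any other value here disables the Kerberos SASL transport for this sketch.
    conf.set(ClientConfig.SECURITY_MODE, "none");

    // Factory and client interface are exactly as defined in this diff; only
    // their module changed (sentry-hdfs-common -> sentry-hdfs-service).
    SentryHDFSServiceClient client = SentryHDFSServiceClientFactory.create(conf);
    try {
      // Illustrative call sequence (an assumption, not taken from this commit):
      // read the last HMS path sequence number the service has seen, then pull
      // permission/path updates starting from earlier sequence numbers.
      long lastPathSeq = client.getLastSeenHMSPathSeqNum();
      SentryAuthzUpdate updates = client.getAllUpdatesFrom(1, lastPathSeq);
      System.out.println("perm updates: " + updates.getPermUpdates().size()
          + ", path updates: " + updates.getPathUpdates().size());
    } finally {
      client.close();
    }
  }
}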


http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/pom.xml b/sentry-hdfs/sentry-hdfs-common/pom.xml
index d244edc..e767e06 100644
--- a/sentry-hdfs/sentry-hdfs-common/pom.xml
+++ b/sentry-hdfs/sentry-hdfs-common/pom.xml
@@ -66,20 +66,9 @@ limitations under the License.
     </dependency>
     <dependency>
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
-      <scope>provided</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
       <artifactId>sentry-provider-file</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
   <build>
     <sourceDirectory>${basedir}/src/main/java</sourceDirectory>

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
deleted file mode 100644
index ab12bf4..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-public interface SentryHDFSServiceClient {
-  String SENTRY_HDFS_SERVICE_NAME = "SentryHDFSService";
-
-  void notifyHMSUpdate(PathsUpdate update)
-      throws SentryHdfsServiceException;
-
-  long getLastSeenHMSPathSeqNum() throws SentryHdfsServiceException;
-
-  SentryAuthzUpdate getAllUpdatesFrom(long permSeqNum, long pathSeqNum)
-      throws SentryHdfsServiceException;
-
-  void close();
-}
-

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
deleted file mode 100644
index 03bf39e..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.security.PrivilegedExceptionAction;
-import java.util.LinkedList;
-import java.util.Map;
-
-import javax.security.auth.callback.CallbackHandler;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SaslRpcServer;
-import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.sentry.hdfs.service.thrift.SentryHDFSService;
-import org.apache.sentry.hdfs.service.thrift.SentryHDFSService.Client;
-import org.apache.sentry.hdfs.service.thrift.TAuthzUpdateResponse;
-import org.apache.sentry.hdfs.service.thrift.TPathsUpdate;
-import org.apache.sentry.hdfs.service.thrift.TPermissionsUpdate;
-import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
-import org.apache.thrift.protocol.TBinaryProtocol;
-import org.apache.thrift.protocol.TCompactProtocol;
-import org.apache.thrift.protocol.TMultiplexedProtocol;
-import org.apache.thrift.protocol.TProtocol;
-import org.apache.thrift.transport.TSaslClientTransport;
-import org.apache.thrift.transport.TSocket;
-import org.apache.thrift.transport.TTransport;
-import org.apache.thrift.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Preconditions;
-
-public class SentryHDFSServiceClientDefaultImpl implements SentryHDFSServiceClient {
-
-  private static final Logger LOGGER = LoggerFactory.getLogger(SentryHDFSServiceClientDefaultImpl.class);
-
-  /**
-   * This transport wraps the Sasl transports to set up the right UGI context for open().
-   */
-  public static class UgiSaslClientTransport extends TSaslClientTransport {
-    protected UserGroupInformation ugi = null;
-
-    public UgiSaslClientTransport(String mechanism, String authorizationId,
-        String protocol, String serverName, Map<String, String> props,
-        CallbackHandler cbh, TTransport transport, boolean wrapUgi)
-        throws IOException {
-      super(mechanism, authorizationId, protocol, serverName, props, cbh,
-          transport);
-      if (wrapUgi) {
-        ugi = UserGroupInformation.getLoginUser();
-      }
-    }
-
-    // open the SASL transport with using the current UserGroupInformation
-    // This is needed to get the current login context stored
-    @Override
-    public void open() throws TTransportException {
-      if (ugi == null) {
-        baseOpen();
-      } else {
-        try {
-          // ensure that the ticket is valid before connecting to service. Note that
-          // checkTGTAndReloginFromKeytab() renew the ticket only when more than 80%
-          // of ticket lifetime has passed. 
-          if (ugi.isFromKeytab()) {
-            ugi.checkTGTAndReloginFromKeytab();
-          }
-
-          ugi.doAs(new PrivilegedExceptionAction<Void>() {
-            public Void run() throws TTransportException {
-              baseOpen();
-              return null;
-            }
-          });
-        } catch (IOException e) {
-          throw new TTransportException("Failed to open SASL transport", e);
-        } catch (InterruptedException e) {
-          throw new TTransportException(
-              "Interrupted while opening underlying transport", e);
-        }
-      }
-    }
-
-    private void baseOpen() throws TTransportException {
-      super.open();
-    }
-  }
-
-  private final Configuration conf;
-  private final InetSocketAddress serverAddress;
-  private final int connectionTimeout;
-  private boolean kerberos;
-  private TTransport transport;
-
-  private String[] serverPrincipalParts;
-  private Client client;
-
-  public SentryHDFSServiceClientDefaultImpl(Configuration conf) throws IOException {
-    this.conf = conf;
-    Preconditions.checkNotNull(this.conf, "Configuration object cannot be null");
-    this.serverAddress = NetUtils.createSocketAddr(Preconditions.checkNotNull(
-                           conf.get(ClientConfig.SERVER_RPC_ADDRESS), "Config key "
-                           + ClientConfig.SERVER_RPC_ADDRESS + " is required"), conf.getInt(
-                           ClientConfig.SERVER_RPC_PORT, ClientConfig.SERVER_RPC_PORT_DEFAULT));
-    this.connectionTimeout = conf.getInt(ClientConfig.SERVER_RPC_CONN_TIMEOUT,
-                                         ClientConfig.SERVER_RPC_CONN_TIMEOUT_DEFAULT);
-    kerberos = ClientConfig.SECURITY_MODE_KERBEROS.equalsIgnoreCase(
-        conf.get(ClientConfig.SECURITY_MODE, ClientConfig.SECURITY_MODE_KERBEROS).trim());
-    transport = new TSocket(serverAddress.getHostName(),
-        serverAddress.getPort(), connectionTimeout);
-    if (kerberos) {
-      String serverPrincipal = Preconditions.checkNotNull(
-          conf.get(ClientConfig.PRINCIPAL), ClientConfig.PRINCIPAL + " is required");
-
-      // Resolve server host in the same way as we are doing on server side
-      serverPrincipal = SecurityUtil.getServerPrincipal(serverPrincipal, serverAddress.getAddress());
-      LOGGER.info("Using server kerberos principal: " + serverPrincipal);
-
-      serverPrincipalParts = SaslRpcServer.splitKerberosName(serverPrincipal);
-      Preconditions.checkArgument(serverPrincipalParts.length == 3,
-           "Kerberos principal should have 3 parts: " + serverPrincipal);
-      boolean wrapUgi = "true".equalsIgnoreCase(conf
-          .get(ClientConfig.SECURITY_USE_UGI_TRANSPORT, "true"));
-      transport = new UgiSaslClientTransport(AuthMethod.KERBEROS.getMechanismName(),
-          null, serverPrincipalParts[0], serverPrincipalParts[1],
-          ClientConfig.SASL_PROPERTIES, null, transport, wrapUgi);
-    } else {
-      serverPrincipalParts = null;
-    }
-    try {
-      transport.open();
-    } catch (TTransportException e) {
-      throw new IOException("Transport exception while opening transport: " + e.getMessage(), e);
-    }
-    LOGGER.info("Successfully opened transport: " + transport + " to " + serverAddress);
-    TProtocol tProtocol = null;
-    long maxMessageSize = conf.getLong(ServiceConstants.ClientConfig.SENTRY_HDFS_THRIFT_MAX_MESSAGE_SIZE,
-        ServiceConstants.ClientConfig.SENTRY_HDFS_THRIFT_MAX_MESSAGE_SIZE_DEFAULT);
-    if (conf.getBoolean(ClientConfig.USE_COMPACT_TRANSPORT,
-        ClientConfig.USE_COMPACT_TRANSPORT_DEFAULT)) {
-      tProtocol = new TCompactProtocol(transport, maxMessageSize, maxMessageSize);
-    } else {
-      tProtocol = new TBinaryProtocol(transport, maxMessageSize, maxMessageSize, true, true);
-    }
-    TMultiplexedProtocol protocol = new TMultiplexedProtocol(
-      tProtocol, SentryHDFSServiceClient.SENTRY_HDFS_SERVICE_NAME);
-    client = new SentryHDFSService.Client(protocol);
-    LOGGER.info("Successfully created client");
-  }
-
-  public synchronized void notifyHMSUpdate(PathsUpdate update)
-      throws SentryHdfsServiceException {
-    try {
-      client.handle_hms_notification(update.toThrift());
-    } catch (Exception e) {
-      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
-    }
-  }
-
-  public synchronized long getLastSeenHMSPathSeqNum()
-      throws SentryHdfsServiceException {
-    try {
-      return client.check_hms_seq_num(-1);
-    } catch (Exception e) {
-      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
-    }
-  }
-
-  public synchronized SentryAuthzUpdate getAllUpdatesFrom(long permSeqNum, long pathSeqNum)
- throws SentryHdfsServiceException {
-    SentryAuthzUpdate retVal = new SentryAuthzUpdate(new LinkedList<PermissionsUpdate>(), new LinkedList<PathsUpdate>());
-    try {
-      TAuthzUpdateResponse sentryUpdates = client.get_all_authz_updates_from(permSeqNum, pathSeqNum);
-      if (sentryUpdates.getAuthzPathUpdate() != null) {
-        for (TPathsUpdate pathsUpdate : sentryUpdates.getAuthzPathUpdate()) {
-          retVal.getPathUpdates().add(new PathsUpdate(pathsUpdate));
-        }
-      }
-      if (sentryUpdates.getAuthzPermUpdate() != null) {
-        for (TPermissionsUpdate permsUpdate : sentryUpdates.getAuthzPermUpdate()) {
-          retVal.getPermUpdates().add(new PermissionsUpdate(permsUpdate));
-        }
-      }
-    } catch (Exception e) {
-      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
-    }
-    return retVal;
-  }
-
-  public void close() {
-    if (transport != null) {
-      transport.close();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
deleted file mode 100644
index 2a18b15..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-import org.apache.hadoop.conf.Configuration;
-
-/**
- * Client factory to create normal client or proxy with HA invocation handler
- */
-public class SentryHDFSServiceClientFactory {
-    
-  private SentryHDFSServiceClientFactory() {
-    // Make constructor private to avoid instantiation
-  }
-  
-  public static SentryHDFSServiceClient create(Configuration conf)
-      throws Exception {
-    return new SentryHDFSServiceClientDefaultImpl(conf);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
deleted file mode 100644
index 307d8c3..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.hdfs;
-
-public class SentryHdfsServiceException extends RuntimeException {
-  private static final long serialVersionUID = 1511645864949767378L;
-
-  public SentryHdfsServiceException(String message, Throwable cause) {
-    super(message, cause);
-  }
-
-  public SentryHdfsServiceException(String message) {
-    super(message);
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
deleted file mode 100644
index eccf83b..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.sentry.hdfs;
-
-import java.security.PrivilegedExceptionAction;
-
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
-import org.apache.sentry.service.thrift.SentryServiceIntegrationBase;
-import org.junit.After;
-import org.junit.Before;
-
-public class SentryHdfsServiceIntegrationBase extends
-    SentryServiceIntegrationBase {
-
-  protected SentryHDFSServiceClient hdfsClient;
-
-  @Before
-  public void before() throws Exception {
-    conf.set("hadoop.security.authentication", "kerberos");
-    UserGroupInformation.setConfiguration(conf);
-    UserGroupInformation.loginUserFromKeytab(CLIENT_PRINCIPAL,
-        clientKeytab.getPath());
-
-    connectToHdfsSyncService();
-  }
-
-  @After
-  public void after() {
-    if (hdfsClient != null) {
-      hdfsClient.close();
-    }
-  }
-
-  protected void connectToHdfsSyncService() throws Exception {
-    if (hdfsClient != null) {
-      hdfsClient.close();
-    }
-
-    // SentryHdfs client configuration setup
-    conf.set(ClientConfig.SERVER_RPC_ADDRESS, server.getAddress()
-        .getHostName());
-    conf.set(ClientConfig.SERVER_RPC_ADDRESS, server.getAddress()
-        .getHostName());
-    conf.set(ClientConfig.SERVER_RPC_PORT,
-        String.valueOf(server.getAddress().getPort()));
-
-    if (kerberos) {
-      conf.set(ClientConfig.SECURITY_MODE, ClientConfig.SECURITY_MODE_KERBEROS);
-      conf.set(ClientConfig.SECURITY_USE_UGI_TRANSPORT, "true");
-      conf.set(ClientConfig.PRINCIPAL, getServerKerberosName());
-      hdfsClient = UserGroupInformation.getLoginUser().doAs(
-          new PrivilegedExceptionAction<SentryHDFSServiceClient>() {
-            @Override
-            public SentryHDFSServiceClient run() throws Exception {
-              return SentryHDFSServiceClientFactory.create(conf);
-            }
-          });
-    } else {
-      hdfsClient = SentryHDFSServiceClientFactory.create(conf);
-    }
-    hdfsClient.close();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
deleted file mode 100644
index b62a83f..0000000
--- a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.sentry.hdfs;
-
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.junit.Assume;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class TestKrbConnectionTimeout extends
- SentryHdfsServiceIntegrationBase {
-
-  @BeforeClass
-  public static void setup() throws Exception {
-    Assume.assumeTrue("true".equalsIgnoreCase(System.getProperty(
-        "sentry.hive.test.ticket.timeout", "false")));
-    kdcConfOverlay.setProperty(MiniKdc.MAX_TICKET_LIFETIME, "300001");
-    setup();
-  }
-
-  /***
-   * Test is run only when sentry.hive.test.ticket.timeout is set to "true"
-   * @throws Exception
-   */
-  @Before
-  public void beforeMethod() {
-  }
-
-  @Test
-  public void testConnectionAfterTicketTimeout() throws Exception {
-    Thread.sleep(400000);
-    connectToHdfsSyncService();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml b/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
index b45490e..25ae34b 100644
--- a/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
+++ b/sentry-hdfs/sentry-hdfs-namenode-plugin/pom.xml
@@ -35,6 +35,10 @@ limitations under the License.
       <version>1.8.0-SNAPSHOT</version>
     </dependency>
     <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-hdfs-service</artifactId>
+    </dependency>
+    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/pom.xml b/sentry-hdfs/sentry-hdfs-service/pom.xml
index b890dd5..038d2af 100644
--- a/sentry-hdfs/sentry-hdfs-service/pom.xml
+++ b/sentry-hdfs/sentry-hdfs-service/pom.xml
@@ -107,6 +107,12 @@ limitations under the License.
       <artifactId>hive-metastore</artifactId>
       <version>${hive.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-provider-db</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
 </project>

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
new file mode 100644
index 0000000..ab12bf4
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+public interface SentryHDFSServiceClient {
+  String SENTRY_HDFS_SERVICE_NAME = "SentryHDFSService";
+
+  void notifyHMSUpdate(PathsUpdate update)
+      throws SentryHdfsServiceException;
+
+  long getLastSeenHMSPathSeqNum() throws SentryHdfsServiceException;
+
+  SentryAuthzUpdate getAllUpdatesFrom(long permSeqNum, long pathSeqNum)
+      throws SentryHdfsServiceException;
+
+  void close();
+}
+

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
new file mode 100644
index 0000000..03bf39e
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientDefaultImpl.java
@@ -0,0 +1,215 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+import java.util.LinkedList;
+import java.util.Map;
+
+import javax.security.auth.callback.CallbackHandler;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SaslRpcServer;
+import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.sentry.hdfs.service.thrift.SentryHDFSService;
+import org.apache.sentry.hdfs.service.thrift.SentryHDFSService.Client;
+import org.apache.sentry.hdfs.service.thrift.TAuthzUpdateResponse;
+import org.apache.sentry.hdfs.service.thrift.TPathsUpdate;
+import org.apache.sentry.hdfs.service.thrift.TPermissionsUpdate;
+import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TMultiplexedProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TSaslClientTransport;
+import org.apache.thrift.transport.TSocket;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+public class SentryHDFSServiceClientDefaultImpl implements SentryHDFSServiceClient {
+
+  private static final Logger LOGGER = LoggerFactory.getLogger(SentryHDFSServiceClientDefaultImpl.class);
+
+  /**
+   * This transport wraps the Sasl transports to set up the right UGI context for open().
+   */
+  public static class UgiSaslClientTransport extends TSaslClientTransport {
+    protected UserGroupInformation ugi = null;
+
+    public UgiSaslClientTransport(String mechanism, String authorizationId,
+        String protocol, String serverName, Map<String, String> props,
+        CallbackHandler cbh, TTransport transport, boolean wrapUgi)
+        throws IOException {
+      super(mechanism, authorizationId, protocol, serverName, props, cbh,
+          transport);
+      if (wrapUgi) {
+        ugi = UserGroupInformation.getLoginUser();
+      }
+    }
+
+    // open the SASL transport with using the current UserGroupInformation
+    // This is needed to get the current login context stored
+    @Override
+    public void open() throws TTransportException {
+      if (ugi == null) {
+        baseOpen();
+      } else {
+        try {
+          // ensure that the ticket is valid before connecting to service. Note that
+          // checkTGTAndReloginFromKeytab() renew the ticket only when more than 80%
+          // of ticket lifetime has passed. 
+          if (ugi.isFromKeytab()) {
+            ugi.checkTGTAndReloginFromKeytab();
+          }
+
+          ugi.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws TTransportException {
+              baseOpen();
+              return null;
+            }
+          });
+        } catch (IOException e) {
+          throw new TTransportException("Failed to open SASL transport", e);
+        } catch (InterruptedException e) {
+          throw new TTransportException(
+              "Interrupted while opening underlying transport", e);
+        }
+      }
+    }
+
+    private void baseOpen() throws TTransportException {
+      super.open();
+    }
+  }
+
+  private final Configuration conf;
+  private final InetSocketAddress serverAddress;
+  private final int connectionTimeout;
+  private boolean kerberos;
+  private TTransport transport;
+
+  private String[] serverPrincipalParts;
+  private Client client;
+
+  public SentryHDFSServiceClientDefaultImpl(Configuration conf) throws IOException {
+    this.conf = conf;
+    Preconditions.checkNotNull(this.conf, "Configuration object cannot be null");
+    this.serverAddress = NetUtils.createSocketAddr(Preconditions.checkNotNull(
+                           conf.get(ClientConfig.SERVER_RPC_ADDRESS), "Config key "
+                           + ClientConfig.SERVER_RPC_ADDRESS + " is required"), conf.getInt(
+                           ClientConfig.SERVER_RPC_PORT, ClientConfig.SERVER_RPC_PORT_DEFAULT));
+    this.connectionTimeout = conf.getInt(ClientConfig.SERVER_RPC_CONN_TIMEOUT,
+                                         ClientConfig.SERVER_RPC_CONN_TIMEOUT_DEFAULT);
+    kerberos = ClientConfig.SECURITY_MODE_KERBEROS.equalsIgnoreCase(
+        conf.get(ClientConfig.SECURITY_MODE, ClientConfig.SECURITY_MODE_KERBEROS).trim());
+    transport = new TSocket(serverAddress.getHostName(),
+        serverAddress.getPort(), connectionTimeout);
+    if (kerberos) {
+      String serverPrincipal = Preconditions.checkNotNull(
+          conf.get(ClientConfig.PRINCIPAL), ClientConfig.PRINCIPAL + " is required");
+
+      // Resolve server host in the same way as we are doing on server side
+      serverPrincipal = SecurityUtil.getServerPrincipal(serverPrincipal, serverAddress.getAddress());
+      LOGGER.info("Using server kerberos principal: " + serverPrincipal);
+
+      serverPrincipalParts = SaslRpcServer.splitKerberosName(serverPrincipal);
+      Preconditions.checkArgument(serverPrincipalParts.length == 3,
+           "Kerberos principal should have 3 parts: " + serverPrincipal);
+      boolean wrapUgi = "true".equalsIgnoreCase(conf
+          .get(ClientConfig.SECURITY_USE_UGI_TRANSPORT, "true"));
+      transport = new UgiSaslClientTransport(AuthMethod.KERBEROS.getMechanismName(),
+          null, serverPrincipalParts[0], serverPrincipalParts[1],
+          ClientConfig.SASL_PROPERTIES, null, transport, wrapUgi);
+    } else {
+      serverPrincipalParts = null;
+    }
+    try {
+      transport.open();
+    } catch (TTransportException e) {
+      throw new IOException("Transport exception while opening transport: " + e.getMessage(), e);
+    }
+    LOGGER.info("Successfully opened transport: " + transport + " to " + serverAddress);
+    TProtocol tProtocol = null;
+    long maxMessageSize = conf.getLong(ServiceConstants.ClientConfig.SENTRY_HDFS_THRIFT_MAX_MESSAGE_SIZE,
+        ServiceConstants.ClientConfig.SENTRY_HDFS_THRIFT_MAX_MESSAGE_SIZE_DEFAULT);
+    if (conf.getBoolean(ClientConfig.USE_COMPACT_TRANSPORT,
+        ClientConfig.USE_COMPACT_TRANSPORT_DEFAULT)) {
+      tProtocol = new TCompactProtocol(transport, maxMessageSize, maxMessageSize);
+    } else {
+      tProtocol = new TBinaryProtocol(transport, maxMessageSize, maxMessageSize, true, true);
+    }
+    TMultiplexedProtocol protocol = new TMultiplexedProtocol(
+      tProtocol, SentryHDFSServiceClient.SENTRY_HDFS_SERVICE_NAME);
+    client = new SentryHDFSService.Client(protocol);
+    LOGGER.info("Successfully created client");
+  }
+
+  public synchronized void notifyHMSUpdate(PathsUpdate update)
+      throws SentryHdfsServiceException {
+    try {
+      client.handle_hms_notification(update.toThrift());
+    } catch (Exception e) {
+      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
+    }
+  }
+
+  public synchronized long getLastSeenHMSPathSeqNum()
+      throws SentryHdfsServiceException {
+    try {
+      return client.check_hms_seq_num(-1);
+    } catch (Exception e) {
+      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
+    }
+  }
+
+  public synchronized SentryAuthzUpdate getAllUpdatesFrom(long permSeqNum, long pathSeqNum)
+ throws SentryHdfsServiceException {
+    SentryAuthzUpdate retVal = new SentryAuthzUpdate(new LinkedList<PermissionsUpdate>(), new LinkedList<PathsUpdate>());
+    try {
+      TAuthzUpdateResponse sentryUpdates = client.get_all_authz_updates_from(permSeqNum, pathSeqNum);
+      if (sentryUpdates.getAuthzPathUpdate() != null) {
+        for (TPathsUpdate pathsUpdate : sentryUpdates.getAuthzPathUpdate()) {
+          retVal.getPathUpdates().add(new PathsUpdate(pathsUpdate));
+        }
+      }
+      if (sentryUpdates.getAuthzPermUpdate() != null) {
+        for (TPermissionsUpdate permsUpdate : sentryUpdates.getAuthzPermUpdate()) {
+          retVal.getPermUpdates().add(new PermissionsUpdate(permsUpdate));
+        }
+      }
+    } catch (Exception e) {
+      throw new SentryHdfsServiceException("Thrift Exception occurred !!", e);
+    }
+    return retVal;
+  }
+
+  public void close() {
+    if (transport != null) {
+      transport.close();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
new file mode 100644
index 0000000..2a18b15
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClientFactory.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * Client factory to create normal client or proxy with HA invocation handler
+ */
+public class SentryHDFSServiceClientFactory {
+    
+  private SentryHDFSServiceClientFactory() {
+    // Make constructor private to avoid instantiation
+  }
+  
+  public static SentryHDFSServiceClient create(Configuration conf)
+      throws Exception {
+    return new SentryHDFSServiceClientDefaultImpl(conf);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
new file mode 100644
index 0000000..307d8c3
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHdfsServiceException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.hdfs;
+
+public class SentryHdfsServiceException extends RuntimeException {
+  private static final long serialVersionUID = 1511645864949767378L;
+
+  public SentryHdfsServiceException(String message, Throwable cause) {
+    super(message, cause);
+  }
+
+  public SentryHdfsServiceException(String message) {
+    super(message);
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java b/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
new file mode 100644
index 0000000..eccf83b
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/SentryHdfsServiceIntegrationBase.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.hdfs;
+
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
+import org.apache.sentry.service.thrift.SentryServiceIntegrationBase;
+import org.junit.After;
+import org.junit.Before;
+
+public class SentryHdfsServiceIntegrationBase extends
+    SentryServiceIntegrationBase {
+
+  protected SentryHDFSServiceClient hdfsClient;
+
+  @Before
+  public void before() throws Exception {
+    conf.set("hadoop.security.authentication", "kerberos");
+    UserGroupInformation.setConfiguration(conf);
+    UserGroupInformation.loginUserFromKeytab(CLIENT_PRINCIPAL,
+        clientKeytab.getPath());
+
+    connectToHdfsSyncService();
+  }
+
+  @After
+  public void after() {
+    if (hdfsClient != null) {
+      hdfsClient.close();
+    }
+  }
+
+  protected void connectToHdfsSyncService() throws Exception {
+    if (hdfsClient != null) {
+      hdfsClient.close();
+    }
+
+    // SentryHdfs client configuration setup
+    conf.set(ClientConfig.SERVER_RPC_ADDRESS, server.getAddress()
+        .getHostName());
+    conf.set(ClientConfig.SERVER_RPC_ADDRESS, server.getAddress()
+        .getHostName());
+    conf.set(ClientConfig.SERVER_RPC_PORT,
+        String.valueOf(server.getAddress().getPort()));
+
+    if (kerberos) {
+      conf.set(ClientConfig.SECURITY_MODE, ClientConfig.SECURITY_MODE_KERBEROS);
+      conf.set(ClientConfig.SECURITY_USE_UGI_TRANSPORT, "true");
+      conf.set(ClientConfig.PRINCIPAL, getServerKerberosName());
+      hdfsClient = UserGroupInformation.getLoginUser().doAs(
+          new PrivilegedExceptionAction<SentryHDFSServiceClient>() {
+            @Override
+            public SentryHDFSServiceClient run() throws Exception {
+              return SentryHDFSServiceClientFactory.create(conf);
+            }
+          });
+    } else {
+      hdfsClient = SentryHDFSServiceClientFactory.create(conf);
+    }
+    hdfsClient.close();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/a6266415/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java b/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
new file mode 100644
index 0000000..b62a83f
--- /dev/null
+++ b/sentry-hdfs/sentry-hdfs-service/src/test/java/org/apache/sentry/hdfs/TestKrbConnectionTimeout.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.sentry.hdfs;
+
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestKrbConnectionTimeout extends
+ SentryHdfsServiceIntegrationBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    Assume.assumeTrue("true".equalsIgnoreCase(System.getProperty(
+        "sentry.hive.test.ticket.timeout", "false")));
+    kdcConfOverlay.setProperty(MiniKdc.MAX_TICKET_LIFETIME, "300001");
+    setup();
+  }
+
+  /***
+   * Test is run only when sentry.hive.test.ticket.timeout is set to "true"
+   * @throws Exception
+   */
+  @Before
+  public void beforeMethod() {
+  }
+
+  @Test
+  public void testConnectionAfterTicketTimeout() throws Exception {
+    Thread.sleep(400000);
+    connectToHdfsSyncService();
+  }
+
+}