Posted to commits@phoenix.apache.org by ma...@apache.org on 2016/09/14 21:43:44 UTC

[46/50] [abbrv] phoenix git commit: PHOENIX-3189 Perform Kerberos login before ConnectionInfo is constructed

PHOENIX-3189 Perform Kerberos login before ConnectionInfo is constructed

Now that ConnectionInfo has the current User/UGI stored inside, we must
make sure that any automatic Kerberos login occurs before the ConnectionInfo
object is constructed. Otherwise, we will have multiple instances of
ConnectionInfo that differ only by the User, which will leak HBase/ZK
connections in the connectionQueryServicesMap. Also, guard the code path
where logins are performed with a lock so that concurrent clients do not collide.

Closes apache/phoenix#191
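
The failure mode is easy to see with a tiny, self-contained sketch (hypothetical
names, not Phoenix code): a connection cache keyed by a value object that
captures the authenticated user at construction time. If the Kerberos login
happened after the key was built, two requests by the same logical client would
carry different identities, the second lookup would miss, and another HBase/ZK
connection would be opened and leaked. Logging in first keeps the keys equal
and the cache effective.

import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class ConnectionCacheSketch {
    // Hypothetical stand-in for PhoenixEmbeddedDriver.ConnectionInfo
    static final class CacheKey {
        final String quorum;
        final String user; // identity captured when the key is constructed

        CacheKey(String quorum, String user) {
            this.quorum = quorum;
            this.user = user;
        }

        @Override public boolean equals(Object o) {
            if (!(o instanceof CacheKey)) return false;
            CacheKey k = (CacheKey) o;
            return quorum.equals(k.quorum) && user.equals(k.user);
        }

        @Override public int hashCode() {
            return Objects.hash(quorum, user);
        }
    }

    // Hypothetical stand-in for PhoenixDriver.connectionQueryServicesMap
    static final ConcurrentMap<CacheKey, Object> CACHE = new ConcurrentHashMap<>();

    public static void main(String[] args) {
        // Login has already happened, so both keys capture the same identity
        // and the second lookup reuses the cached connection.
        String loggedInUser = "user1@EXAMPLE.COM";
        CacheKey first = new CacheKey("zk1:2181", loggedInUser);
        CacheKey second = new CacheKey("zk1:2181", loggedInUser);
        CACHE.putIfAbsent(first, new Object());
        System.out.println("cache size: " + CACHE.size());             // 1
        System.out.println("reused:     " + CACHE.containsKey(second)); // true
    }
}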


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/2dc6be4e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/2dc6be4e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/2dc6be4e

Branch: refs/heads/calcite
Commit: 2dc6be4e9ffb205aaa5b0f6a1f2746eed8d23426
Parents: 93a9c91
Author: Josh Elser <el...@apache.org>
Authored: Wed Aug 17 13:34:59 2016 -0400
Committer: Josh Elser <el...@apache.org>
Committed: Wed Aug 31 16:27:30 2016 -0400

----------------------------------------------------------------------
 phoenix-core/pom.xml                            |   5 +
 .../org/apache/phoenix/jdbc/PhoenixDriver.java  |  10 +-
 .../phoenix/jdbc/PhoenixEmbeddedDriver.java     |  89 ++++-
 .../query/ConnectionQueryServicesImpl.java      |  24 +-
 .../apache/phoenix/util/InstanceResolver.java   |   7 +
 .../phoenix/jdbc/SecureUserConnectionsTest.java | 369 +++++++++++++++++++
 .../src/test/resources/log4j.properties         |   2 +
 pom.xml                                         |  18 +
 8 files changed, 504 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-core/pom.xml b/phoenix-core/pom.xml
index ab2f80e..cf383f2 100644
--- a/phoenix-core/pom.xml
+++ b/phoenix-core/pom.xml
@@ -461,6 +461,11 @@
       <artifactId>hadoop-minicluster</artifactId>
     </dependency>
     <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
         <groupId>org.jruby.joni</groupId>
         <artifactId>joni</artifactId>
         <version>${joni.version}</version>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
index 91d25ca..fa31dd9 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDriver.java
@@ -47,10 +47,10 @@ import org.apache.phoenix.query.HBaseFactoryProvider;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesImpl;
 import org.apache.phoenix.query.QueryServicesOptions;
-import org.apache.phoenix.util.PhoenixRuntime;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 
@@ -212,7 +212,8 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
             checkClosed();
             ConnectionInfo connInfo = ConnectionInfo.create(url);
             QueryServices services = getQueryServices();
-            ConnectionInfo normalizedConnInfo = connInfo.normalize(services.getProps());
+            // Also performs the Kerberos login if the URL/properties request this
+            ConnectionInfo normalizedConnInfo = connInfo.normalize(services.getProps(), info);
             ConnectionQueryServices connectionQueryServices = connectionQueryServicesMap.get(normalizedConnInfo);
             if (connectionQueryServices == null) {
                 if (normalizedConnInfo.isConnectionless()) {
@@ -317,4 +318,9 @@ public final class PhoenixDriver extends PhoenixEmbeddedDriver {
             closeLock.writeLock().unlock();
         }
     }
+
+    @VisibleForTesting
+    protected ConcurrentMap<ConnectionInfo,ConnectionQueryServices> getCachedConnections() {
+        return this.connectionQueryServicesMap;
+    }
 }
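
With this change, the login is driven from ConnectionInfo.normalize() whenever the
URL or the connection properties carry a principal and keytab. A hedged usage
sketch follows, assuming the quorum:port:rootNode:principal:keytab URL form that
the new tests exercise; the host, realm, and keytab path are placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

public class SecureConnectExample {
    public static void main(String[] args) throws SQLException {
        // Placeholder quorum, znode parent, principal, and keytab path.
        String url = "jdbc:phoenix:zk1.example.com:2181:/hbase:"
                + "alice@EXAMPLE.COM:/etc/security/keytabs/alice.keytab";
        // The driver performs the keytab login (if needed) before the normalized
        // ConnectionInfo is built, so repeated connections by the same user map
        // to a single cached ConnectionQueryServices instance.
        try (Connection conn = DriverManager.getConnection(url)) {
            System.out.println("Connected as a Kerberos-authenticated user");
        }
    }
}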

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
index 375388a..272fb22 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixEmbeddedDriver.java
@@ -28,6 +28,7 @@ import java.sql.SQLFeatureNotSupportedException;
 import java.util.Map;
 import java.util.Properties;
 import java.util.StringTokenizer;
+import java.util.Map.Entry;
 import java.util.logging.Logger;
 
 import javax.annotation.concurrent.Immutable;
@@ -37,6 +38,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.phoenix.coprocessor.MetaDataProtocol;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.exception.SQLExceptionInfo;
@@ -48,6 +50,7 @@ import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.QueryUtil;
 import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SQLCloseable;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
@@ -194,6 +197,8 @@ public abstract class PhoenixEmbeddedDriver implements Driver, SQLCloseable {
      * @since 0.1.1
      */
     public static class ConnectionInfo {
+        private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ConnectionInfo.class);
+        private static final Object KERBEROS_LOGIN_LOCK = new Object();
         private static SQLException getMalFormedUrlException(String url) {
             return new SQLExceptionInfo.Builder(SQLExceptionCode.MALFORMED_CONNECTION_URL)
             .setMessage(url).build().buildException();
@@ -283,7 +288,7 @@ public abstract class PhoenixEmbeddedDriver implements Driver, SQLCloseable {
             return new ConnectionInfo(quorum,port,rootNode, principal, keytabFile);
         }
         
-        public ConnectionInfo normalize(ReadOnlyProps props) throws SQLException {
+        public ConnectionInfo normalize(ReadOnlyProps props, Properties info) throws SQLException {
             String zookeeperQuorum = this.getZookeeperQuorum();
             Integer port = this.getPort();
             String rootNode = this.getRootNode();
@@ -333,8 +338,77 @@ public abstract class PhoenixEmbeddedDriver implements Driver, SQLCloseable {
             		 keytab = props.get(QueryServices.HBASE_CLIENT_KEYTAB);
             	 }
             }
+            if (!isConnectionless()) {
+                boolean credsProvidedInUrl = null != principal && null != keytab;
+                boolean credsProvidedInProps = info.containsKey(QueryServices.HBASE_CLIENT_PRINCIPAL) && info.containsKey(QueryServices.HBASE_CLIENT_KEYTAB);
+                if (credsProvidedInUrl || credsProvidedInProps) {
+                    // PHOENIX-3189 Because ConnectionInfo is immutable, we must make sure all parts of it are correct before
+                    // construction; this also requires the correct Kerberos user credentials object (since those are compared
+                    // by reference and not by value). If the user provided a principal and keytab via the JDBC url, we must make
+                    // sure that the Kerberos login happens *before* we construct the ConnectionInfo object. Otherwise, the use of
+                    // ConnectionInfo to determine when ConnectionQueryServices implementations should be reused will be broken.
+                    try {
+                        // Check if we need to authenticate with kerberos so that we cache the correct ConnectionInfo
+                        UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
+                        if (!currentUser.hasKerberosCredentials() || !currentUser.getUserName().equals(principal)) {
+                            synchronized (KERBEROS_LOGIN_LOCK) {
+                                // Double check the current user, might have changed since we checked last. Don't want
+                                // to re-login if it's the same user.
+                                currentUser = UserGroupInformation.getCurrentUser();
+                                if (!currentUser.hasKerberosCredentials() || !currentUser.getUserName().equals(principal)) {
+                                    final Configuration config = getConfiguration(props, info, principal, keytab);
+                                    logger.info("Trying to connect to a secure cluster as {} with keytab {}", config.get(QueryServices.HBASE_CLIENT_PRINCIPAL),
+                                            config.get(QueryServices.HBASE_CLIENT_KEYTAB));
+                                    UserGroupInformation.setConfiguration(config);
+                                    User.login(config, QueryServices.HBASE_CLIENT_KEYTAB, QueryServices.HBASE_CLIENT_PRINCIPAL, null);
+                                    logger.info("Successful login to secure cluster");
+                                }
+                            }
+                        } else {
+                            // The user already has Kerberos creds, so there isn't anything to change in the ConnectionInfo.
+                            logger.debug("Already logged in as {}", currentUser);
+                        }
+                    } catch (IOException e) {
+                        throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
+                            .setRootCause(e).build().buildException();
+                    }
+                } else {
+                    logger.debug("Principal and keytab not provided, not attempting Kerberos login");
+                }
+            } // else, no connection, no need to login
+            // Will use the current User from UGI
             return new ConnectionInfo(zookeeperQuorum, port, rootNode, principal, keytab);
         }
+
+        /**
+         * Constructs a Configuration object to use when performing a Kerberos login.
+         * @param props QueryServices properties
+         * @param info User-provided properties
+         * @param principal Kerberos user principal
+         * @param keytab Path to Kerberos user keytab
+         * @return Configuration object suitable for Kerberos login
+         */
+        private Configuration getConfiguration(ReadOnlyProps props, Properties info, String principal, String keytab) {
+            final Configuration config = HBaseFactoryProvider.getConfigurationFactory().getConfiguration();
+            // Add QueryServices properties
+            for (Entry<String,String> entry : props) {
+                config.set(entry.getKey(), entry.getValue());
+            }
+            // Add any user-provided properties (via DriverManager)
+            if (info != null) {
+                for (Object key : info.keySet()) {
+                    config.set((String) key, info.getProperty((String) key));
+                }
+            }
+            // Set the principal and keytab if provided from the URL (overriding those provided in Properties)
+            if (null != principal) {
+                config.set(QueryServices.HBASE_CLIENT_PRINCIPAL, principal);
+            }
+            if (null != keytab) {
+                config.set(QueryServices.HBASE_CLIENT_KEYTAB, keytab);
+            }
+            return config;
+        }
         
         private final Integer port;
         private final String rootNode;
@@ -365,6 +439,15 @@ public abstract class PhoenixEmbeddedDriver implements Driver, SQLCloseable {
         	this(zookeeperQuorum, port, rootNode, null, null);
         }
 
+        /**
+         * Copy constructor for all members except {@link #user}.
+         *
+         * @param other The instance to copy
+         */
+        public ConnectionInfo(ConnectionInfo other) {
+            this(other.zookeeperQuorum, other.port, other.rootNode, other.principal, other.keytab);
+        }
+
         public ReadOnlyProps asProps() {
             Map<String, String> connectionProps = Maps.newHashMapWithExpectedSize(3);
             if (getZookeeperQuorum() != null) {
@@ -408,6 +491,10 @@ public abstract class PhoenixEmbeddedDriver implements Driver, SQLCloseable {
             return principal;
         }
 
+        public User getUser() {
+            return user;
+        }
+
         @Override
         public int hashCode() {
             final int prime = 31;
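
The guard added to normalize() above is the familiar check, lock, re-check idiom
around a static lock (KERBEROS_LOGIN_LOCK). Stripped of the UGI and Configuration
details, its shape is sketched below; needsLogin() and doLogin() are hypothetical
placeholders for the credential checks and the User.login() call in the real code.

import java.io.IOException;

public final class LoginGuard {
    private static final Object LOGIN_LOCK = new Object();

    public static void ensureLoggedIn(String principal, String keytab) throws IOException {
        if (needsLogin(principal)) {             // cheap check outside the lock
            synchronized (LOGIN_LOCK) {
                if (needsLogin(principal)) {     // re-check: another thread may have just logged in
                    doLogin(principal, keytab);  // expensive, done at most once per identity change
                }
            }
        }
    }

    // Placeholder: the real code inspects UserGroupInformation.getCurrentUser()
    private static boolean needsLogin(String principal) {
        return false;
    }

    // Placeholder: the real code calls UserGroupInformation.setConfiguration() and User.login()
    private static void doLogin(String principal, String keytab) throws IOException {
    }
}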

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
index 524067d..04c2c7b 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/ConnectionQueryServicesImpl.java
@@ -96,7 +96,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.VersionInfo;
 import org.apache.hadoop.hbase.zookeeper.ZKConfig;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.phoenix.compile.MutationPlan;
 import org.apache.phoenix.coprocessor.GroupedAggregateRegionObserver;
 import org.apache.phoenix.coprocessor.MetaDataEndpointImpl;
@@ -374,22 +373,13 @@ public class ConnectionQueryServicesImpl extends DelegateQueryServices implement
     
     private void openConnection() throws SQLException {
         try {
-            // check if we need to authenticate with kerberos
-            String clientKeytab = this.getProps().get(HBASE_CLIENT_KEYTAB);
-            String clientPrincipal = this.getProps().get(HBASE_CLIENT_PRINCIPAL);
-            if (clientKeytab != null && clientPrincipal != null) {
-                logger.info("Trying to connect to a secure cluster with keytab:" + clientKeytab);
-                UserGroupInformation.setConfiguration(config);
-                User.login(config, HBASE_CLIENT_KEYTAB, HBASE_CLIENT_PRINCIPAL, null);
-                logger.info("Successfull login to secure cluster!!");
-            }
-			boolean transactionsEnabled = props.getBoolean(
-					QueryServices.TRANSACTIONS_ENABLED,
-					QueryServicesOptions.DEFAULT_TRANSACTIONS_ENABLED);
-			// only initialize the tx service client if needed
-			if (transactionsEnabled) {
-				initTxServiceClient();
-			}
+            boolean transactionsEnabled = props.getBoolean(
+                    QueryServices.TRANSACTIONS_ENABLED,
+                    QueryServicesOptions.DEFAULT_TRANSACTIONS_ENABLED);
+            // only initialize the tx service client if needed
+            if (transactionsEnabled) {
+                initTxServiceClient();
+            }
             this.connection = HBaseFactoryProvider.getHConnectionFactory().createConnection(this.config);
         } catch (IOException e) {
             throw new SQLExceptionInfo.Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
index dd99d1e..4757e46 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/util/InstanceResolver.java
@@ -19,6 +19,8 @@ package org.apache.phoenix.util;
 
 import org.apache.commons.collections.IteratorUtils;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import java.util.Iterator;
 import java.util.List;
 import java.util.ServiceLoader;
@@ -85,4 +87,9 @@ public class InstanceResolver {
         }
         return defaultInstance;
     }
+
+    @VisibleForTesting
+    public static void clearSingletons() {
+        RESOLVED_SINGLETONS.clear();
+    }
 }

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/test/java/org/apache/phoenix/jdbc/SecureUserConnectionsTest.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/java/org/apache/phoenix/jdbc/SecureUserConnectionsTest.java b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/SecureUserConnectionsTest.java
new file mode 100644
index 0000000..6a33142
--- /dev/null
+++ b/phoenix-core/src/test/java/org/apache/phoenix/jdbc/SecureUserConnectionsTest.java
@@ -0,0 +1,369 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.jdbc;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.ConnectionInfo;
+import org.apache.phoenix.query.ConfigurationFactory;
+import org.apache.phoenix.util.InstanceResolver;
+import org.apache.phoenix.util.PhoenixRuntime;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests ConnectionQueryServices caching when Kerberos authentication is enabled. It's not
+ * trivial to directly test this, so we exploit the knowledge that the caching is driven by
+ * a ConcurrentHashMap. We can use a HashSet to determine when instances of ConnectionInfo
+ * collide and when they do not.
+ */
+public class SecureUserConnectionsTest {
+    private static final File TEMP_DIR = new File(getClassTempDir());
+    private static final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
+    private static final File KDC_DIR = new File(TEMP_DIR, "kdc");
+    private static final List<File> USER_KEYTAB_FILES = new ArrayList<>();
+    private static final int NUM_USERS = 3;
+    private static final Properties EMPTY_PROPERTIES = new Properties();
+    private static final String BASE_URL = PhoenixRuntime.JDBC_PROTOCOL + ":localhost:2181";
+
+    private static MiniKdc KDC;
+
+    @BeforeClass
+    public static void setupKdc() throws Exception {
+        ensureIsEmptyDirectory(KDC_DIR);
+        ensureIsEmptyDirectory(KEYTAB_DIR);
+        // Create and start the KDC
+        Properties kdcConf = MiniKdc.createConf();
+        kdcConf.put(MiniKdc.DEBUG, true);
+        KDC = new MiniKdc(kdcConf, KDC_DIR);
+        KDC.start();
+
+        createUsers(NUM_USERS);
+
+        final Configuration conf = new Configuration(false);
+        conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+        conf.set(User.HBASE_SECURITY_CONF_KEY, "kerberos");
+        conf.setBoolean(User.HBASE_SECURITY_AUTHORIZATION_CONF_KEY, true);
+        UserGroupInformation.setConfiguration(conf);
+
+        // Clear the cached singletons so we can inject our own.
+        InstanceResolver.clearSingletons();
+        // Make sure the ConnectionInfo doesn't try to pull a default Configuration
+        InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {
+            @Override
+            public Configuration getConfiguration() {
+                return conf;
+            }
+            @Override
+            public Configuration getConfiguration(Configuration confToClone) {
+                Configuration copy = new Configuration(conf);
+                copy.addResource(confToClone);
+                return copy;
+            }
+        });
+    }
+
+    @AfterClass
+    public static void stopKdc() throws Exception {
+        // Remove our custom ConfigurationFactory for future tests
+        InstanceResolver.clearSingletons();
+        if (null != KDC) {
+            KDC.stop();
+            KDC = null;
+        }
+    }
+
+    private static String getClassTempDir() {
+        StringBuilder sb = new StringBuilder(32);
+        sb.append(System.getProperty("user.dir")).append(File.separator);
+        sb.append("target").append(File.separator);
+        sb.append(SecureUserConnectionsTest.class.getSimpleName());
+        return sb.toString();
+    }
+
+    private static void ensureIsEmptyDirectory(File f) throws IOException {
+        if (f.exists()) {
+            if (f.isDirectory()) {
+                FileUtils.deleteDirectory(f);
+            } else {
+                assertTrue("Failed to delete keytab directory", f.delete());
+            }
+        }
+        assertTrue("Failed to create keytab directory", f.mkdirs());
+    }
+
+    private static void createUsers(int numUsers) throws Exception {
+        assertNotNull("KDC is null, was setup method called?", KDC);
+        for (int i = 1; i <= numUsers; i++) {
+            String principal = "user" + i;
+            File keytabFile = new File(KEYTAB_DIR, principal + ".keytab");
+            KDC.createPrincipal(keytabFile, principal);
+            USER_KEYTAB_FILES.add(keytabFile);
+        }
+    }
+
+    /**
+     * Returns the principal for a user.
+     *
+     * @param offset The "number" of the user to return; one-based, not zero-based.
+     */
+    private static String getUserPrincipal(int offset) {
+        return "user" + offset + "@" + KDC.getRealm();
+    }
+
+    /**
+     * Returns the keytab file for the corresponding principal with the same {@code offset}.
+     * Requires {@link #createUsers(int)} to have been called with a value greater than {@code offset}.
+     *
+     * @param offset The "number" for the principal whose keytab should be returned. One-based, not zero-based.
+     */
+    public static File getUserKeytabFile(int offset) {
+        assertTrue("Invalid offset: " + offset, (offset - 1) >= 0 && (offset - 1) < USER_KEYTAB_FILES.size());
+        return USER_KEYTAB_FILES.get(offset - 1);
+    }
+
+    private String joinUserAuthentication(String origUrl, String principal, File keytab) {
+        StringBuilder sb = new StringBuilder(64);
+        // Knock off the trailing terminator if one exists
+        if (origUrl.charAt(origUrl.length() - 1) == PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR) {
+            sb.append(origUrl, 0, origUrl.length() - 1);
+        } else {
+            sb.append(origUrl);
+        }
+
+        sb.append(PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR).append(principal);
+        sb.append(PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR).append(keytab.getPath());
+        return sb.append(PhoenixRuntime.JDBC_PROTOCOL_TERMINATOR).toString();
+    }
+
+    @Test
+    public void testMultipleInvocationsBySameUserAreEquivalent() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+
+        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ1, keytab1.getPath());
+
+        PrivilegedExceptionAction<Void> callable = new PrivilegedExceptionAction<Void>() {
+            public Void run() throws Exception {
+                String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+                connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+                return null;
+            }
+        };
+
+        // Using the same UGI should result in two equivalent ConnectionInfo objects
+        ugi.doAs(callable);
+        assertEquals(1, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        ugi.doAs(callable);
+        assertEquals(1, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+    }
+
+    @Test
+    public void testMultipleUniqueUGIInstancesAreDisjoint() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+
+        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ1, keytab1.getPath());
+
+        PrivilegedExceptionAction<Void> callable = new PrivilegedExceptionAction<Void>() {
+            public Void run() throws Exception {
+                String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+                connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+                return null;
+            }
+        };
+
+        ugi.doAs(callable);
+        assertEquals(1, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // A second, equivalent login by the same "real" user yields a different UGI instance, so the resulting
+        // ConnectionInfo is treated as distinct; re-logging in like this is a programmer error.
+        UserGroupInformation ugiCopy = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ1, keytab1.getPath());
+        ugiCopy.doAs(callable);
+        assertEquals(2, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+    }
+
+    @Test
+    public void testAlternatingLogins() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+        final String princ2 = getUserPrincipal(2);
+        final File keytab2 = getUserKeytabFile(2);
+
+        UserGroupInformation ugi1 = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ1, keytab1.getPath());
+        UserGroupInformation ugi2 = UserGroupInformation.loginUserFromKeytabAndReturnUGI(princ2, keytab2.getPath());
+
+        // Using the same UGI should result in two equivalent ConnectionInfo objects
+        ugi1.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws Exception {
+                String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+                connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+                return null;
+            }
+        });
+        assertEquals(1, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        ugi2.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws Exception {
+                String url = joinUserAuthentication(BASE_URL, princ2, keytab2);
+                connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+                return null;
+            }
+        });
+        assertEquals(2, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        ugi1.doAs(new PrivilegedExceptionAction<Void>() {
+            public Void run() throws Exception {
+                String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+                connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+                return null;
+            }
+        });
+        assertEquals(2, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+    }
+
+    @Test
+    public void testAlternatingDestructiveLogins() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+        final String princ2 = getUserPrincipal(2);
+        final File keytab2 = getUserKeytabFile(2);
+        final String url1 = joinUserAuthentication(BASE_URL, princ1, keytab1);
+        final String url2 = joinUserAuthentication(BASE_URL, princ2, keytab2);
+
+        UserGroupInformation.loginUserFromKeytab(princ1, keytab1.getPath());
+        // The first login and connection as user1 yields a single ConnectionInfo
+        connections.add(ConnectionInfo.create(url1).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        UserGroupInformation.loginUserFromKeytab(princ2, keytab2.getPath());
+        connections.add(ConnectionInfo.create(url2).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(2, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // Because the UGI instances are unique, so are the connections
+        UserGroupInformation.loginUserFromKeytab(princ1, keytab1.getPath());
+        connections.add(ConnectionInfo.create(url1).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(3, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+    }
+
+    @Test
+    public void testMultipleConnectionsAsSameUser() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+        final String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+
+        UserGroupInformation.loginUserFromKeytab(princ1, keytab1.getPath());
+        // Using the same UGI should result in two equivalent ConnectionInfo objects
+        connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // Re-using the same login (same UGI) produces an equivalent ConnectionInfo, so the set does not grow
+        connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+    }
+
+    @Test
+    public void testMultipleConnectionsAsSameUserWithoutLogin() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+
+        // Using the same UGI should result in two equivalent ConnectionInfo objects
+        final String url = joinUserAuthentication(BASE_URL, princ1, keytab1);
+        connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // The same current user produces an equivalent ConnectionInfo, so the set does not grow
+        connections.add(ConnectionInfo.create(url).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+    }
+
+    @Test
+    public void testAlternatingConnectionsWithoutLogin() throws Exception {
+        final HashSet<ConnectionInfo> connections = new HashSet<>();
+        final String princ1 = getUserPrincipal(1);
+        final File keytab1 = getUserKeytabFile(1);
+        final String princ2 = getUserPrincipal(2);
+        final File keytab2 = getUserKeytabFile(2);
+        final String url1 = joinUserAuthentication(BASE_URL, princ1, keytab1);
+        final String url2 = joinUserAuthentication(BASE_URL, princ2, keytab2);
+
+        // Using the same UGI should result in two equivalent ConnectionInfo objects
+        connections.add(ConnectionInfo.create(url1).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(1, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // Because the UGI instances are unique, so are the connections
+        connections.add(ConnectionInfo.create(url2).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(2, connections.size());
+        verifyAllConnectionsAreKerberosBased(connections);
+
+        // Switching back to user1 triggers another re-login and a new UGI, so this ConnectionInfo is distinct again
+        connections.add(ConnectionInfo.create(url1).normalize(ReadOnlyProps.EMPTY_PROPS, EMPTY_PROPERTIES));
+        assertEquals(3, connections.size());
+        // Sanity check
+        verifyAllConnectionsAreKerberosBased(connections);
+    }
+
+    private void verifyAllConnectionsAreKerberosBased(Collection<ConnectionInfo> connections) {
+        for (ConnectionInfo cnxnInfo : connections) {
+            assertTrue("ConnectionInfo does not have kerberos credentials: " + cnxnInfo, cnxnInfo.getUser().getUGI().hasKerberosCredentials());
+        }
+    }
+}
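
For reference, the MiniKdc plumbing the test relies on (provided by the new
hadoop-minikdc test dependency) boils down to a few calls: start a throwaway KDC,
mint a keytab for a principal, switch Hadoop security to kerberos, and log in
through UGI. A condensed sketch with placeholder paths:

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;

public class MiniKdcSketch {
    public static void main(String[] args) throws Exception {
        File workDir = new File("target/kdc-sketch");
        workDir.mkdirs();
        MiniKdc kdc = new MiniKdc(MiniKdc.createConf(), workDir);
        kdc.start();
        try {
            // Mint a keytab for a test principal
            File keytab = new File(workDir, "user1.keytab");
            kdc.createPrincipal(keytab, "user1");

            // UGI only performs keytab logins when security is set to kerberos
            Configuration conf = new Configuration(false);
            conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
            UserGroupInformation.setConfiguration(conf);

            UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                    "user1@" + kdc.getRealm(), keytab.getPath());
            System.out.println("Has Kerberos credentials: " + ugi.hasKerberosCredentials());
        } finally {
            kdc.stop();
        }
    }
}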

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/phoenix-core/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/phoenix-core/src/test/resources/log4j.properties b/phoenix-core/src/test/resources/log4j.properties
index 8e54793..85706b4 100644
--- a/phoenix-core/src/test/resources/log4j.properties
+++ b/phoenix-core/src/test/resources/log4j.properties
@@ -61,3 +61,5 @@ log4j.logger.org.mortbay.log=WARN
 log4j.logger.org.apache.hadoop=WARN
 log4j.logger.org.apache.zookeeper=ERROR
 log4j.logger.org.apache.hadoop.hbase=DEBUG
+log4j.logger.org.apache.directory=WARN
+log4j.logger.net.sf.ehcache=WARN

http://git-wip-us.apache.org/repos/asf/phoenix/blob/2dc6be4e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 142595f..b5edb6c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -322,6 +322,12 @@
           <artifactId>maven-shade-plugin</artifactId>
           <version>2.4.3</version>
         </plugin>
+        <plugin>
+          <!-- Allows us to get the apache-ds bundle artifacts -->
+          <groupId>org.apache.felix</groupId>
+          <artifactId>maven-bundle-plugin</artifactId>
+          <version>2.5.3</version>
+        </plugin>
       </plugins>
     </pluginManagement>
 
@@ -449,6 +455,13 @@
           </excludes>
         </configuration>
       </plugin>
+      <plugin>
+        <!-- Allows us to get the apache-ds bundle artifacts -->
+        <groupId>org.apache.felix</groupId>
+        <artifactId>maven-bundle-plugin</artifactId>
+        <extensions>true</extensions>
+        <inherited>true</inherited>
+      </plugin>
     </plugins>
   </build>
 
@@ -659,6 +672,11 @@
         <type>test-jar</type> <!-- this does not work which is typical for maven.-->
         <scope>test</scope>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-minikdc</artifactId>
+        <version>${hadoop-two.version}</version>
+      </dependency>
 
       <!-- General Dependencies -->
       <dependency>