Posted to commits@phoenix.apache.org by ja...@apache.org on 2017/07/17 19:06:16 UTC

phoenix git commit: Revert "PHOENIX-3598 Implement HTTP parameter impersonation for PQS"

Repository: phoenix
Updated Branches:
  refs/heads/4.x-HBase-0.98 f5f5810ec -> c5ae34779


Revert "PHOENIX-3598 Implement HTTP parameter impersonation for PQS"

This reverts commit 1ab0273989f4b454e2463e229c636c60ff8d0a47.


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/c5ae3477
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/c5ae3477
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/c5ae3477

Branch: refs/heads/4.x-HBase-0.98
Commit: c5ae34779793e0dbe5b8df6faab1510f21f60b4c
Parents: f5f5810
Author: James Taylor <ja...@apache.org>
Authored: Mon Jul 17 12:05:41 2017 -0700
Committer: James Taylor <ja...@apache.org>
Committed: Mon Jul 17 12:05:41 2017 -0700

----------------------------------------------------------------------
 .../phoenix/jdbc/PhoenixDatabaseMetaData.java   |   4 -
 .../org/apache/phoenix/query/QueryServices.java |   3 -
 .../phoenix/query/QueryServicesOptions.java     |   3 -
 phoenix-queryserver/pom.xml                     |  10 -
 .../HttpParamImpersonationQueryServerIT.java    | 434 -------------------
 .../phoenix/end2end/SecureQueryServerIT.java    | 320 --------------
 .../src/it/resources/log4j.properties           |   9 +-
 .../phoenix/queryserver/server/QueryServer.java |  75 +---
 .../server/PhoenixRemoteUserExtractorTest.java  | 108 -----
 9 files changed, 5 insertions(+), 961 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
index 9acec96..5fd1e0d 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/jdbc/PhoenixDatabaseMetaData.java
@@ -92,7 +92,6 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData {
     public static final byte[] SYSTEM_CATALOG_SCHEMA_BYTES = QueryConstants.SYSTEM_SCHEMA_NAME_BYTES;
     public static final String SYSTEM_SCHEMA_NAME = QueryConstants.SYSTEM_SCHEMA_NAME;
     public static final byte[] SYSTEM_SCHEMA_NAME_BYTES = QueryConstants.SYSTEM_SCHEMA_NAME_BYTES;
-    public static final TableName SYSTEM_SCHEMA_HBASE_TABLE_NAME = TableName.valueOf(SYSTEM_SCHEMA_NAME);
 
     public static final String SYSTEM_CATALOG_TABLE = "CATALOG";
     public static final byte[] SYSTEM_CATALOG_TABLE_BYTES = Bytes.toBytes(SYSTEM_CATALOG_TABLE);
@@ -107,7 +106,6 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData {
     public static final byte[] IS_NAMESPACE_MAPPED_BYTES = Bytes.toBytes(IS_NAMESPACE_MAPPED);
     public static final byte[] SYSTEM_STATS_NAME_BYTES = Bytes.toBytes(SYSTEM_STATS_NAME);
     public static final byte[] SYSTEM_STATS_TABLE_BYTES = Bytes.toBytes(SYSTEM_STATS_TABLE);
-    public static final TableName SYSTEM_STATS_HBASE_TABLE_NAME = TableName.valueOf(SYSTEM_STATS_NAME);
     public static final String SYSTEM_CATALOG_ALIAS = "\"SYSTEM.TABLE\"";
 
     public static final byte[] SYSTEM_SEQUENCE_FAMILY_BYTES = QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES;
@@ -118,7 +116,6 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData {
     public static final String SYSTEM_SEQUENCE = SYSTEM_CATALOG_SCHEMA + ".\"" + SYSTEM_SEQUENCE_TABLE + "\"";
     public static final String SYSTEM_SEQUENCE_NAME = SchemaUtil.getTableName(SYSTEM_SEQUENCE_SCHEMA, SYSTEM_SEQUENCE_TABLE);
     public static final byte[] SYSTEM_SEQUENCE_NAME_BYTES = Bytes.toBytes(SYSTEM_SEQUENCE_NAME);
-    public static final TableName SYSTEM_SEQUENCE_HBASE_TABLE_NAME = TableName.valueOf(SYSTEM_SEQUENCE_NAME);
     
     public static final String TABLE_NAME = "TABLE_NAME";
     public static final byte[] TABLE_NAME_BYTES = Bytes.toBytes(TABLE_NAME);
@@ -218,7 +215,6 @@ public class PhoenixDatabaseMetaData implements DatabaseMetaData {
     public static final String SYSTEM_FUNCTION_TABLE = "FUNCTION";
     public static final String SYSTEM_FUNCTION_NAME = SchemaUtil.getTableName(SYSTEM_CATALOG_SCHEMA, SYSTEM_FUNCTION_TABLE);
     public static final byte[] SYSTEM_FUNCTION_NAME_BYTES = Bytes.toBytes(SYSTEM_FUNCTION_NAME);
-    public static final TableName SYSTEM_FUNCTION_HBASE_TABLE_NAME = TableName.valueOf(SYSTEM_FUNCTION_NAME);
 
     public static final String FUNCTION_NAME = "FUNCTION_NAME";
     public static final byte[] FUNCTION_NAME_BYTES = Bytes.toBytes(FUNCTION_NAME);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java
index 7df90f4..b4589bb 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServices.java
@@ -228,9 +228,6 @@ public interface QueryServices extends SQLCloseable {
     public static final String QUERY_SERVER_UGI_CACHE_CONCURRENCY = "phoenix.queryserver.ugi.cache.concurrency";
     public static final String QUERY_SERVER_KERBEROS_ALLOWED_REALMS = "phoenix.queryserver.kerberos.allowed.realms";
     public static final String QUERY_SERVER_SPNEGO_AUTH_DISABLED_ATTRIB = "phoenix.queryserver.spnego.auth.disabled";
-    public static final String QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR_ATTRIB = "phoenix.queryserver.withRemoteUserExtractor";
-    public static final String QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM = "phoenix.queryserver.remoteUserExtractor.param";
-    public static final String QUERY_SERVER_DISABLE_KERBEROS_LOGIN = "phoenix.queryserver.disable.kerberos.login";
 
     public static final String RENEW_LEASE_ENABLED = "phoenix.scanner.lease.renew.enabled";
     public static final String RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS = "phoenix.scanner.lease.renew.interval";

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
index d5a1eed..32618f0 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/query/QueryServicesOptions.java
@@ -260,9 +260,6 @@ public class QueryServicesOptions {
     public static final int DEFAULT_QUERY_SERVER_UGI_CACHE_INITIAL_SIZE = 100;
     public static final int DEFAULT_QUERY_SERVER_UGI_CACHE_CONCURRENCY = 10;
     public static final boolean DEFAULT_QUERY_SERVER_SPNEGO_AUTH_DISABLED = false;
-    public static final boolean DEFAULT_QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR = false;
-    public static final String DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM = "doAs";
-    public static final boolean DEFAULT_QUERY_SERVER_DISABLE_KERBEROS_LOGIN = false;
 
     public static final boolean DEFAULT_RENEW_LEASE_ENABLED = true;
     public static final int DEFAULT_RUN_RENEW_LEASE_FREQUENCY_INTERVAL_MILLISECONDS =
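
For context, the defaults removed above pair with the three property names removed from QueryServices.java; together they were the only switches for the reverted feature. A minimal sketch of how a deployment would have enabled it before this revert, using the now-removed keys as literal strings with Hadoop's standard Configuration API:

    import org.apache.hadoop.conf.Configuration;

    public class EnableImpersonationSketch {
        public static Configuration withImpersonation(Configuration conf) {
            // These keys exist only before this revert; quoted from the removed lines.
            conf.setBoolean("phoenix.queryserver.withRemoteUserExtractor", true);
            // "doAs" was the removed default for the extractor parameter.
            conf.set("phoenix.queryserver.remoteUserExtractor.param", "doAs");
            return conf;
        }
    }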

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/pom.xml b/phoenix-queryserver/pom.xml
index 789f5b2..12f37e3 100644
--- a/phoenix-queryserver/pom.xml
+++ b/phoenix-queryserver/pom.xml
@@ -149,11 +149,6 @@
     </dependency>
     <!-- for tests -->
     <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
       <groupId>org.apache.phoenix</groupId>
       <artifactId>phoenix-core</artifactId>
       <classifier>tests</classifier>
@@ -181,10 +176,5 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minikdc</artifactId>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
 </project>
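
The two test-only dependencies dropped here backed the deleted integration tests: hadoop-minikdc supplied the in-process KDC and mockito-all supplied the servlet-request mocks. A hedged sketch of the MiniKdc lifecycle those tests leaned on (the deleted ITs went through HBaseTestingUtility.setupMiniKdc, but the steps are the same; directories and principals are illustrative):

    import java.io.File;
    import java.util.Properties;
    import org.apache.hadoop.minikdc.MiniKdc;

    public class MiniKdcSketch {
        public static void main(String[] args) throws Exception {
            Properties kdcConf = MiniKdc.createConf();
            MiniKdc kdc = new MiniKdc(kdcConf, new File("target/kdc"));
            kdc.start();
            // One keytab holding both the SPNEGO and service principals, as the deleted ITs did.
            File keytab = new File("target/kdc/test.keytab");
            kdc.createPrincipal(keytab, "HTTP/localhost", "securecluster/localhost");
            kdc.stop();
        }
    }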

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java b/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
deleted file mode 100644
index ef9ff68..0000000
--- a/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
+++ /dev/null
@@ -1,434 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.end2end;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.access.AccessControlClient;
-import org.apache.hadoop.hbase.security.access.AccessController;
-import org.apache.hadoop.hbase.security.access.Permission.Action;
-import org.apache.hadoop.hbase.security.token.TokenProvider;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
-import org.apache.phoenix.query.ConfigurationFactory;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.query.QueryServicesOptions;
-import org.apache.phoenix.queryserver.client.Driver;
-import org.apache.phoenix.queryserver.client.ThinClientUtil;
-import org.apache.phoenix.queryserver.server.QueryServer;
-import org.apache.phoenix.util.InstanceResolver;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-
-@Category(NeedsOwnMiniClusterTest.class)
-public class HttpParamImpersonationQueryServerIT {
-    private static final Log LOG = LogFactory.getLog(HttpParamImpersonationQueryServerIT.class);
-    private static final List<TableName> SYSTEM_TABLE_NAMES = Arrays.asList(PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME,
-        PhoenixDatabaseMetaData.SYSTEM_MUTEX_HBASE_TABLE_NAME,
-        PhoenixDatabaseMetaData.SYSTEM_FUNCTION_HBASE_TABLE_NAME,
-        PhoenixDatabaseMetaData.SYSTEM_SCHEMA_HBASE_TABLE_NAME,
-        PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_HBASE_TABLE_NAME,
-        PhoenixDatabaseMetaData.SYSTEM_STATS_HBASE_TABLE_NAME);
-
-    private static final File TEMP_DIR = new File(getTempDirForClass());
-    private static final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
-    private static final List<File> USER_KEYTAB_FILES = new ArrayList<>();
-
-    private static final String SPNEGO_PRINCIPAL = "HTTP/localhost";
-    private static final String SERVICE_PRINCIPAL = "securecluster/localhost";
-    private static File KEYTAB;
-
-    private static MiniKdc KDC;
-    private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
-    private static LocalHBaseCluster HBASE_CLUSTER;
-    private static int NUM_CREATED_USERS;
-
-    private static ExecutorService PQS_EXECUTOR;
-    private static QueryServer PQS;
-    private static int PQS_PORT;
-    private static String PQS_URL;
-
-    private static String getTempDirForClass() {
-        StringBuilder sb = new StringBuilder(32);
-        sb.append(System.getProperty("user.dir")).append(File.separator);
-        sb.append("target").append(File.separator);
-        sb.append(HttpParamImpersonationQueryServerIT.class.getSimpleName());
-        return sb.toString();
-    }
-
-    private static void updateDefaultRealm() throws Exception {
-        // (at least) one other phoenix test triggers the caching of this field before the KDC is up,
-        // which causes principal parsing to fail.
-        Field f = KerberosName.class.getDeclaredField("defaultRealm");
-        f.setAccessible(true);
-        // Default realm for MiniKDC
-        f.set(null, "EXAMPLE.COM");
-    }
-
-    private static void createUsers(int numUsers) throws Exception {
-        assertNotNull("KDC is null, was setup method called?", KDC);
-        NUM_CREATED_USERS = numUsers;
-        for (int i = 1; i <= numUsers; i++) {
-            String principal = "user" + i;
-            File keytabFile = new File(KEYTAB_DIR, principal + ".keytab");
-            KDC.createPrincipal(keytabFile, principal);
-            USER_KEYTAB_FILES.add(keytabFile);
-        }
-    }
-
-    private static Entry<String,File> getUser(int offset) {
-        Preconditions.checkArgument(offset > 0 && offset <= NUM_CREATED_USERS);
-        return Maps.immutableEntry("user" + offset, USER_KEYTAB_FILES.get(offset - 1));
-    }
-
-    /**
-     * Set up the security configuration for HDFS.
-     */
-    private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
-        // Set principal+keytab configuration for HDFS
-        conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
-        conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
-        conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
-        // Enable token access for HDFS blocks
-        conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-        // Only use HTTPS (required because we aren't using "secure" ports)
-        conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-        // Bind on localhost for spnego to have a chance at working
-        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-        conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-
-        // Generate SSL certs
-        File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
-        keystoresDir.mkdirs();
-        String sslConfDir = KeyStoreTestUtil.getClasspathDir(HttpParamImpersonationQueryServerIT.class);
-        KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
-
-        // Magic flag to tell hdfs to not fail on using ports above 1024
-        conf.setBoolean("ignore.secure.ports.for.testing", true);
-    }
-
-    private static void ensureIsEmptyDirectory(File f) throws IOException {
-        if (f.exists()) {
-            if (f.isDirectory()) {
-                FileUtils.deleteDirectory(f);
-            } else {
-                assertTrue("Failed to delete keytab directory", f.delete());
-            }
-        }
-        assertTrue("Failed to create keytab directory", f.mkdirs());
-    }
-
-    /**
-     * Set up and start Kerberos and HBase.
-     */
-    @BeforeClass
-    public static void setUp() throws Exception {
-        final Configuration conf = UTIL.getConfiguration();
-        // Ensure the dirs we need are created/empty
-        ensureIsEmptyDirectory(TEMP_DIR);
-        ensureIsEmptyDirectory(KEYTAB_DIR);
-        KEYTAB = new File(KEYTAB_DIR, "test.keytab");
-        // Start a MiniKDC
-        KDC = UTIL.setupMiniKdc(KEYTAB);
-        // Create a service principal and spnego principal in one keytab
-        // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
-        //     use separate identities for HBase and HDFS results in a GSS initiate error. The quick
-        //     solution is to just use a single "service" principal instead of "hbase" and "hdfs"
-        //     (or "dn" and "nn") per usual.
-        KDC.createPrincipal(KEYTAB, SPNEGO_PRINCIPAL, SERVICE_PRINCIPAL);
-        // Start ZK by hand
-        UTIL.startMiniZKCluster();
-
-        // Create a number of unprivileged users
-        createUsers(2);
-
-        // Set configuration for HBase
-        HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        HBaseKerberosUtils.setSecuredConfiguration(conf);
-        setHdfsSecuredConfiguration(conf);
-        UserGroupInformation.setConfiguration(conf);
-        conf.setInt(HConstants.MASTER_PORT, 0);
-        conf.setInt(HConstants.MASTER_INFO_PORT, 0);
-        conf.setInt(HConstants.REGIONSERVER_PORT, 0);
-        conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
-        conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
-        conf.setStrings(CoprocessorHost.REGIONSERVER_COPROCESSOR_CONF_KEY, AccessController.class.getName());
-        conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, AccessController.class.getName(), TokenProvider.class.getName());
-
-        // Secure Phoenix setup
-        conf.set("phoenix.queryserver.kerberos.principal", SPNEGO_PRINCIPAL);
-        conf.set("phoenix.queryserver.keytab.file", KEYTAB.getAbsolutePath());
-        conf.setBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN, true);
-        conf.setInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
-        // Required so that PQS can impersonate the end-users to HBase
-        conf.set("hadoop.proxyuser.HTTP.groups", "*");
-        conf.set("hadoop.proxyuser.HTTP.hosts", "*");
-        // user1 is allowed to impersonate others, user2 is not
-        conf.set("hadoop.proxyuser.user1.groups", "*");
-        conf.set("hadoop.proxyuser.user1.hosts", "*");
-        conf.setBoolean(QueryServices.QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR_ATTRIB, true);
-
-        // Clear the cached singletons so we can inject our own.
-        InstanceResolver.clearSingletons();
-        // Make sure the ConnectionInfo doesn't try to pull a default Configuration
-        InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {
-            @Override
-            public Configuration getConfiguration() {
-                return conf;
-            }
-            @Override
-            public Configuration getConfiguration(Configuration confToClone) {
-                Configuration copy = new Configuration(conf);
-                copy.addResource(confToClone);
-                return copy;
-            }
-        });
-        updateDefaultRealm();
-
-        // Start HDFS
-        UTIL.startMiniDFSCluster(1);
-        // Use LocalHBaseCluster to keep HBaseTestingUtility from doing something wrong
-        // NB. I'm not actually sure what HTU does incorrectly, but this was pulled from some test
-        //     classes in HBase itself. I couldn't get HTU to work myself (2017/07/06)
-        Path rootdir = UTIL.getDataTestDirOnTestFS(HttpParamImpersonationQueryServerIT.class.getSimpleName());
-        FSUtils.setRootDir(conf, rootdir);
-        HBASE_CLUSTER = new LocalHBaseCluster(conf, 1);
-        HBASE_CLUSTER.startup();
-
-        // Then fork a thread with PQS in it.
-        startQueryServer();
-    }
-
-    private static void startQueryServer() throws Exception {
-        PQS = new QueryServer(new String[0], UTIL.getConfiguration());
-        // Get the SPNEGO ident for PQS to use
-        final UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(SPNEGO_PRINCIPAL, KEYTAB.getAbsolutePath());
-        PQS_EXECUTOR = Executors.newSingleThreadExecutor();
-        // Launch PQS, doing the Kerberos login ourselves instead of letting PQS do it (which would
-        // break the HBase/HDFS logins also running in the same test case).
-        PQS_EXECUTOR.submit(new Runnable() {
-            @Override public void run() {
-                ugi.doAs(new PrivilegedAction<Void>() {
-                    @Override public Void run() {
-                        PQS.run();
-                        return null;
-                    }
-                });
-            }
-        });
-        PQS.awaitRunning();
-        PQS_PORT = PQS.getPort();
-        PQS_URL = ThinClientUtil.getConnectionUrl("localhost", PQS_PORT) + ";authentication=SPNEGO";
-    }
-
-    @AfterClass
-    public static void stopKdc() throws Exception {
-        // Remove our custom ConfigurationFactory for future tests
-        InstanceResolver.clearSingletons();
-        if (PQS_EXECUTOR != null) {
-            PQS.stop();
-            PQS_EXECUTOR.shutdown();
-            if (!PQS_EXECUTOR.awaitTermination(5, TimeUnit.SECONDS)) {
-                LOG.info("PQS didn't exit in 5 seconds, proceeding anyways.");
-            }
-        }
-        if (HBASE_CLUSTER != null) {
-            HBASE_CLUSTER.shutdown();
-            HBASE_CLUSTER.join();
-        }
-        if (UTIL != null) {
-            UTIL.shutdownMiniZKCluster();
-        }
-        if (KDC != null) {
-            KDC.stop();
-        }
-    }
-
-    @Test
-    public void testSuccessfulImpersonation() throws Exception {
-        final Entry<String,File> user1 = getUser(1);
-        final Entry<String,File> user2 = getUser(2);
-        // Build the JDBC URL by hand with the doAs
-        final String doAsUrlTemplate = Driver.CONNECT_STRING_PREFIX + "url=http://localhost:" + PQS_PORT + "?"
-            + QueryServicesOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM + "=%s;authentication=SPNEGO;serialization=PROTOBUF";
-        final String tableName = "POSITIVE_IMPERSONATION";
-        final int numRows = 5;
-        final UserGroupInformation serviceUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, KEYTAB.getAbsolutePath());
-        serviceUgi.doAs(new PrivilegedExceptionAction<Void>() {
-            @Override public Void run() throws Exception {
-                createTable(tableName, numRows);
-                grantUsersToPhoenixSystemTables(Arrays.asList(user1.getKey(), user2.getKey()));
-                return null;
-            }
-        });
-        UserGroupInformation user1Ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user1.getKey(), user1.getValue().getAbsolutePath());
-        user1Ugi.doAs(new PrivilegedExceptionAction<Void>() {
-            @Override public Void run() throws Exception {
-                // This user should not be able to read the table
-                readAndExpectPermissionError(PQS_URL, tableName, numRows);
-                // Run the same query with the same credentials, but with a doAs. We should be permitted since the user we're impersonating can run the query
-                final String doAsUrl = String.format(doAsUrlTemplate, serviceUgi.getShortUserName());
-                try (Connection conn = DriverManager.getConnection(doAsUrl);
-                        Statement stmt = conn.createStatement()) {
-                    conn.setAutoCommit(true);
-                    readRows(stmt, tableName, numRows);
-                }
-                return null;
-            }
-        });
-    }
-
-    @Test
-    public void testDisallowedImpersonation() throws Exception {
-        final Entry<String,File> user2 = getUser(2);
-        // Build the JDBC URL by hand with the doAs
-        final String doAsUrlTemplate = Driver.CONNECT_STRING_PREFIX + "url=http://localhost:" + PQS_PORT + "?"
-            + QueryServicesOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM + "=%s;authentication=SPNEGO;serialization=PROTOBUF";
-        final String tableName = "DISALLOWED_IMPERSONATION";
-        final int numRows = 5;
-        final UserGroupInformation serviceUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, KEYTAB.getAbsolutePath());
-        serviceUgi.doAs(new PrivilegedExceptionAction<Void>() {
-            @Override public Void run() throws Exception {
-                createTable(tableName, numRows);
-                grantUsersToPhoenixSystemTables(Arrays.asList(user2.getKey()));
-                return null;
-            }
-        });
-        UserGroupInformation user2Ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user2.getKey(), user2.getValue().getAbsolutePath());
-        user2Ugi.doAs(new PrivilegedExceptionAction<Void>() {
-            @Override public Void run() throws Exception {
-                // This user is not allowed to read this table
-                readAndExpectPermissionError(PQS_URL, tableName, numRows);
-                // This user is also not allowed to impersonate
-                final String doAsUrl = String.format(doAsUrlTemplate, serviceUgi.getShortUserName());
-                try (Connection conn = DriverManager.getConnection(doAsUrl);
-                        Statement stmt = conn.createStatement()) {
-                    conn.setAutoCommit(true);
-                    readRows(stmt, tableName, numRows);
-                    fail("user2 should not be allowed to impersonate the service user");
-                } catch (Exception e) {
-                    LOG.info("Caught expected exception", e);
-                }
-                return null;
-            }
-        });
-    }
-
-    void createTable(String tableName, int numRows) throws Exception {
-        try (Connection conn = DriverManager.getConnection(PQS_URL);
-            Statement stmt = conn.createStatement()) {
-            conn.setAutoCommit(true);
-            assertFalse(stmt.execute("CREATE TABLE " + tableName + "(pk integer not null primary key)"));
-            for (int i = 0; i < numRows; i++) {
-                assertEquals(1, stmt.executeUpdate("UPSERT INTO " + tableName + " values(" + i + ")"));
-            }
-            readRows(stmt, tableName, numRows);
-        }
-    }
-
-    void grantUsersToPhoenixSystemTables(List<String> usersToGrant) throws Exception {
-        // Grant the users permission to access the Phoenix system tables
-        try {
-            for (String user : usersToGrant) {
-                for (TableName tn : SYSTEM_TABLE_NAMES) {
-                    AccessControlClient.grant(UTIL.getConnection(), tn, user, null, null, Action.READ, Action.EXEC);
-                }
-            }
-        } catch (Throwable e) {
-            throw new Exception(e);
-        }
-    }
-
-    void readAndExpectPermissionError(String jdbcUrl, String tableName, int numRows) {
-        try (Connection conn = DriverManager.getConnection(jdbcUrl);
-            Statement stmt = conn.createStatement()) {
-            conn.setAutoCommit(true);
-            readRows(stmt, tableName, numRows);
-            fail("Expected an exception reading another user's table");
-        } catch (Exception e) {
-            LOG.debug("Caught expected exception", e);
-            // Avatica doesn't re-create new exceptions across the wire. Need to just look at the contents of the message.
-            String errorMessage = e.getMessage();
-            assertTrue("Expected the error message to contain an HBase AccessDeniedException", errorMessage.contains("org.apache.hadoop.hbase.security.AccessDeniedException"));
-            // Expecting an error message like: "Insufficient permissions for user 'user1' (table=POSITIVE_IMPERSONATION, action=READ)"
-            // Being overly cautious to make sure we don't inadvertently pass the test due to permission errors on phoenix system tables.
-            assertTrue("Expected message to contain " + tableName + " and READ", errorMessage.contains(tableName) && errorMessage.contains("READ"));
-        }
-    }
-
-    void readRows(Statement stmt, String tableName, int numRows) throws SQLException {
-        try (ResultSet rs = stmt.executeQuery("SELECT * FROM " + tableName)) {
-            for (int i = 0; i < numRows; i++) {
-                assertTrue(rs.next());
-                assertEquals(i, rs.getInt(1));
-            }
-            assertFalse(rs.next());
-        }
-    }
-
-    byte[] copyBytes(byte[] src, int offset, int length) {
-        byte[] dest = new byte[length];
-        System.arraycopy(src, offset, dest, 0, length);
-        return dest;
-    }
-}
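
The deleted test above drove impersonation end to end. As a standalone illustration, here is a minimal sketch of the thin-client connection it built by hand; the "jdbc:phoenix:thin:" prefix stands in for Driver.CONNECT_STRING_PREFIX, the port and end-user name are placeholders (the test used a dynamically assigned port), and the table name comes from the deleted code:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class DoAsClientSketch {
        public static void main(String[] args) throws Exception {
            // Pre-revert behavior: the "doAs" query parameter names the end user
            // that the authenticated caller wants to impersonate.
            String url = "jdbc:phoenix:thin:url=http://localhost:8765?doAs=enduser"
                + ";authentication=SPNEGO;serialization=PROTOBUF";
            try (Connection conn = DriverManager.getConnection(url);
                    Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT * FROM POSITIVE_IMPERSONATION")) {
                while (rs.next()) {
                    System.out.println(rs.getInt(1));
                }
            }
        }
    }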

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java b/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
deleted file mode 100644
index 9e12444..0000000
--- a/phoenix-queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.end2end;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map.Entry;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
-import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
-import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
-import org.apache.hadoop.hbase.security.token.TokenProvider;
-import org.apache.hadoop.hbase.util.FSUtils;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.minikdc.MiniKdc;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosName;
-import org.apache.phoenix.query.ConfigurationFactory;
-import org.apache.phoenix.query.QueryServices;
-import org.apache.phoenix.queryserver.client.ThinClientUtil;
-import org.apache.phoenix.queryserver.server.QueryServer;
-import org.apache.phoenix.util.InstanceResolver;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Maps;
-
-@Category(NeedsOwnMiniClusterTest.class)
-public class SecureQueryServerIT {
-    private static final Log LOG = LogFactory.getLog(SecureQueryServerIT.class);
-
-    private static final File TEMP_DIR = new File(getTempDirForClass());
-    private static final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
-    private static final List<File> USER_KEYTAB_FILES = new ArrayList<>();
-
-    private static final String SPNEGO_PRINCIPAL = "HTTP/localhost";
-    private static final String SERVICE_PRINCIPAL = "securecluster/localhost";
-    private static File KEYTAB;
-
-    private static MiniKdc KDC;
-    private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
-    private static LocalHBaseCluster HBASE_CLUSTER;
-    private static int NUM_CREATED_USERS;
-
-    private static ExecutorService PQS_EXECUTOR;
-    private static QueryServer PQS;
-    private static int PQS_PORT;
-    private static String PQS_URL;
-
-    private static String getTempDirForClass() {
-        StringBuilder sb = new StringBuilder(32);
-        sb.append(System.getProperty("user.dir")).append(File.separator);
-        sb.append("target").append(File.separator);
-        sb.append(SecureQueryServerIT.class.getSimpleName());
-        return sb.toString();
-    }
-
-    private static void updateDefaultRealm() throws Exception {
-        // (at least) one other phoenix test triggers the caching of this field before the KDC is up,
-        // which causes principal parsing to fail.
-        Field f = KerberosName.class.getDeclaredField("defaultRealm");
-        f.setAccessible(true);
-        // Default realm for MiniKDC
-        f.set(null, "EXAMPLE.COM");
-    }
-
-    private static void createUsers(int numUsers) throws Exception {
-        assertNotNull("KDC is null, was setup method called?", KDC);
-        NUM_CREATED_USERS = numUsers;
-        for (int i = 1; i <= numUsers; i++) {
-            String principal = "user" + i;
-            File keytabFile = new File(KEYTAB_DIR, principal + ".keytab");
-            KDC.createPrincipal(keytabFile, principal);
-            USER_KEYTAB_FILES.add(keytabFile);
-        }
-    }
-
-    private static Entry<String,File> getUser(int offset) {
-        Preconditions.checkArgument(offset > 0 && offset <= NUM_CREATED_USERS);
-        return Maps.immutableEntry("user" + offset, USER_KEYTAB_FILES.get(offset - 1));
-    }
-
-    /**
-     * Set up the security configuration for HDFS.
-     */
-    private static void setHdfsSecuredConfiguration(Configuration conf) throws Exception {
-        // Set principal+keytab configuration for HDFS
-        conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
-        conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB.getAbsolutePath());
-        conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
-        // Enable token access for HDFS blocks
-        conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-        // Only use HTTPS (required because we aren't using "secure" ports)
-        conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-        // Bind on localhost for spnego to have a chance at working
-        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-        conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-
-        // Generate SSL certs
-        File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
-        keystoresDir.mkdirs();
-        String sslConfDir = KeyStoreTestUtil.getClasspathDir(SecureQueryServerIT.class);
-        KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, false);
-
-        // Magic flag to tell hdfs to not fail on using ports above 1024
-        conf.setBoolean("ignore.secure.ports.for.testing", true);
-    }
-
-    private static void ensureIsEmptyDirectory(File f) throws IOException {
-        if (f.exists()) {
-            if (f.isDirectory()) {
-                FileUtils.deleteDirectory(f);
-            } else {
-                assertTrue("Failed to delete keytab directory", f.delete());
-            }
-        }
-        assertTrue("Failed to create keytab directory", f.mkdirs());
-    }
-
-    /**
-     * Set up and start Kerberos and HBase.
-     */
-    @BeforeClass
-    public static void setUp() throws Exception {
-        final Configuration conf = UTIL.getConfiguration();
-        // Ensure the dirs we need are created/empty
-        ensureIsEmptyDirectory(TEMP_DIR);
-        ensureIsEmptyDirectory(KEYTAB_DIR);
-        KEYTAB = new File(KEYTAB_DIR, "test.keytab");
-        // Start a MiniKDC
-        KDC = UTIL.setupMiniKdc(KEYTAB);
-        // Create a service principal and spnego principal in one keytab
-        // NB. Due to some apparent limitations between HDFS and HBase in the same JVM, trying to
-        //     use separate identities for HBase and HDFS results in a GSS initiate error. The quick
-        //     solution is to just use a single "service" principal instead of "hbase" and "hdfs"
-        //     (or "dn" and "nn") per usual.
-        KDC.createPrincipal(KEYTAB, SPNEGO_PRINCIPAL, SERVICE_PRINCIPAL);
-        // Start ZK by hand
-        UTIL.startMiniZKCluster();
-
-        // Create a number of unprivileged users
-        createUsers(3);
-
-        // Set configuration for HBase
-        HBaseKerberosUtils.setPrincipalForTesting(SERVICE_PRINCIPAL + "@" + KDC.getRealm());
-        HBaseKerberosUtils.setSecuredConfiguration(conf);
-        setHdfsSecuredConfiguration(conf);
-        UserGroupInformation.setConfiguration(conf);
-        conf.setInt(HConstants.MASTER_PORT, 0);
-        conf.setInt(HConstants.MASTER_INFO_PORT, 0);
-        conf.setInt(HConstants.REGIONSERVER_PORT, 0);
-        conf.setInt(HConstants.REGIONSERVER_INFO_PORT, 0);
-        conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
-            TokenProvider.class.getName());
-
-        // Secure Phoenix setup
-        conf.set("phoenix.queryserver.kerberos.principal", SPNEGO_PRINCIPAL);
-        conf.set("phoenix.queryserver.keytab.file", KEYTAB.getAbsolutePath());
-        conf.setBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN, true);
-        conf.setInt(QueryServices.QUERY_SERVER_HTTP_PORT_ATTRIB, 0);
-        // Required so that PQS can impersonate the end-users to HBase
-        conf.set("hadoop.proxyuser.HTTP.groups", "*");
-        conf.set("hadoop.proxyuser.HTTP.hosts", "*");
-
-        // Clear the cached singletons so we can inject our own.
-        InstanceResolver.clearSingletons();
-        // Make sure the ConnectionInfo doesn't try to pull a default Configuration
-        InstanceResolver.getSingleton(ConfigurationFactory.class, new ConfigurationFactory() {
-            @Override
-            public Configuration getConfiguration() {
-                return conf;
-            }
-            @Override
-            public Configuration getConfiguration(Configuration confToClone) {
-                Configuration copy = new Configuration(conf);
-                copy.addResource(confToClone);
-                return copy;
-            }
-        });
-        updateDefaultRealm();
-
-        // Start HDFS
-        UTIL.startMiniDFSCluster(1);
-        // Use LocalHBaseCluster to keep HBaseTestingUtility from doing something wrong
-        // NB. I'm not actually sure what HTU does incorrectly, but this was pulled from some test
-        //     classes in HBase itself. I couldn't get HTU to work myself (2017/07/06)
-        Path rootdir = UTIL.getDataTestDirOnTestFS(SecureQueryServerIT.class.getSimpleName());
-        FSUtils.setRootDir(conf, rootdir);
-        HBASE_CLUSTER = new LocalHBaseCluster(conf, 1);
-        HBASE_CLUSTER.startup();
-
-        // Then fork a thread with PQS in it.
-        startQueryServer();
-    }
-
-    private static void startQueryServer() throws Exception {
-        PQS = new QueryServer(new String[0], UTIL.getConfiguration());
-        // Get the SPNEGO ident for PQS to use
-        final UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(SPNEGO_PRINCIPAL, KEYTAB.getAbsolutePath());
-        PQS_EXECUTOR = Executors.newSingleThreadExecutor();
-        // Launch PQS, doing the Kerberos login ourselves instead of letting PQS do it (which would
-        // break the HBase/HDFS logins also running in the same test case).
-        PQS_EXECUTOR.submit(new Runnable() {
-            @Override public void run() {
-                ugi.doAs(new PrivilegedAction<Void>() {
-                    @Override public Void run() {
-                        PQS.run();
-                        return null;
-                    }
-                });
-            }
-        });
-        PQS.awaitRunning();
-        PQS_PORT = PQS.getPort();
-        PQS_URL = ThinClientUtil.getConnectionUrl("localhost", PQS_PORT) + ";authentication=SPNEGO";
-    }
-
-    @AfterClass
-    public static void stopKdc() throws Exception {
-        // Remove our custom ConfigurationFactory for future tests
-        InstanceResolver.clearSingletons();
-        if (PQS_EXECUTOR != null) {
-            PQS.stop();
-            PQS_EXECUTOR.shutdown();
-            if (!PQS_EXECUTOR.awaitTermination(5, TimeUnit.SECONDS)) {
-                LOG.info("PQS didn't exit in 5 seconds, proceeding anyways.");
-            }
-        }
-        if (HBASE_CLUSTER != null) {
-            HBASE_CLUSTER.shutdown();
-            HBASE_CLUSTER.join();
-        }
-        if (UTIL != null) {
-            UTIL.shutdownMiniZKCluster();
-        }
-        if (KDC != null) {
-            KDC.stop();
-        }
-    }
-
-    @Test
-    public void testBasicReadWrite() throws Exception {
-        final Entry<String,File> user1 = getUser(1);
-        UserGroupInformation user1Ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(user1.getKey(), user1.getValue().getAbsolutePath());
-        user1Ugi.doAs(new PrivilegedExceptionAction<Void>() {
-            @Override public Void run() throws Exception {
-                // Phoenix
-                final String tableName = "phx_table1";
-                try (java.sql.Connection conn = DriverManager.getConnection(PQS_URL);
-                        Statement stmt = conn.createStatement()) {
-                    conn.setAutoCommit(true);
-                    assertFalse(stmt.execute("CREATE TABLE " + tableName + "(pk integer not null primary key)"));
-                    final int numRows = 5;
-                    for (int i = 0; i < numRows; i++) {
-                      assertEquals(1, stmt.executeUpdate("UPSERT INTO " + tableName + " values(" + i + ")"));
-                    }
-
-                    try (ResultSet rs = stmt.executeQuery("SELECT * FROM " + tableName)) {
-                        for (int i = 0; i < numRows; i++) {
-                            assertTrue(rs.next());
-                            assertEquals(i, rs.getInt(1));
-                        }
-                        assertFalse(rs.next());
-                    }
-                }
-                return null;
-            }
-        });
-    }
-
-    byte[] copyBytes(byte[] src, int offset, int length) {
-        byte[] dest = new byte[length];
-        System.arraycopy(src, offset, dest, 0, length);
-        return dest;
-    }
-}
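
Both deleted ITs share the same Kerberos client pattern: log in from a keytab, then run all JDBC work inside ugi.doAs. A compact sketch of that pattern under stated assumptions (the principal, keytab path, and PQS URL are placeholders; the calls mirror the deleted code):

    import java.security.PrivilegedExceptionAction;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KerberosJdbcSketch {
        public static void main(String[] args) throws Exception {
            // Log in as the test user from its keytab (placeholders).
            UserGroupInformation ugi = UserGroupInformation
                .loginUserFromKeytabAndReturnUGI("user1", "/path/to/user1.keytab");
            // All JDBC traffic below runs as the Kerberos-authenticated user.
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override public Void run() throws Exception {
                    String url = "jdbc:phoenix:thin:url=http://localhost:8765"
                        + ";authentication=SPNEGO;serialization=PROTOBUF";
                    try (Connection conn = DriverManager.getConnection(url)) {
                        conn.createStatement().execute(
                            "CREATE TABLE t (pk integer not null primary key)");
                    }
                    return null;
                }
            });
        }
    }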

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/src/it/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/it/resources/log4j.properties b/phoenix-queryserver/src/it/resources/log4j.properties
index f90cf16..6b1ce50 100644
--- a/phoenix-queryserver/src/it/resources/log4j.properties
+++ b/phoenix-queryserver/src/it/resources/log4j.properties
@@ -58,11 +58,6 @@ log4j.appender.console.layout.ConversionPattern=%d %-5p [%t] %C{2}(%L): %m%n
 
 #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
 
+log4j.logger.org.apache.hadoop=WARN
 log4j.logger.org.apache.zookeeper=ERROR
-
-# Suppresses junk from minikdc
-log4j.logger.org.mortbay.log=WARN
-log4j.logger.org.apache.directory=WARN
-log4j.logger.net.sf.ehcache=WARN
-# Suppress the "no group for user" spamming
-log4j.logger.org.apache.hadoop.security.UserGroupInformation=ERROR
+log4j.logger.org.apache.hadoop.hbase=DEBUG

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
index 86aa686..60d3f86 100644
--- a/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
+++ b/phoenix-queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
@@ -28,10 +28,6 @@ import org.apache.calcite.avatica.remote.LocalService;
 import org.apache.calcite.avatica.remote.Service;
 import org.apache.calcite.avatica.server.DoAsRemoteUserCallback;
 import org.apache.calcite.avatica.server.HttpServer;
-import org.apache.calcite.avatica.server.RemoteUserExtractor;
-import org.apache.calcite.avatica.server.RemoteUserExtractionException;
-import org.apache.calcite.avatica.server.HttpRequestRemoteUserExtractor;
-import org.apache.calcite.avatica.server.HttpQueryStringParameterRemoteUserExtractor;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -42,7 +38,6 @@ import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
@@ -63,8 +58,6 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
-import javax.servlet.http.HttpServletRequest;
-
 /**
  * A query server for Phoenix over Calcite's Avatica.
  */
@@ -182,11 +175,10 @@ public final class QueryServer extends Configured implements Tool, Runnable {
           QueryServices.QUERY_SERVER_HBASE_SECURITY_CONF_ATTRIB));
       final boolean disableSpnego = getConf().getBoolean(QueryServices.QUERY_SERVER_SPNEGO_AUTH_DISABLED_ATTRIB,
               QueryServicesOptions.DEFAULT_QUERY_SERVER_SPNEGO_AUTH_DISABLED);
-      final boolean disableLogin = getConf().getBoolean(QueryServices.QUERY_SERVER_DISABLE_KERBEROS_LOGIN,
-              QueryServicesOptions.DEFAULT_QUERY_SERVER_DISABLE_KERBEROS_LOGIN);
+
 
       // handle secure cluster credentials
-      if (isKerberos && !disableSpnego && !disableLogin) {
+      if (isKerberos && !disableSpnego) {
         String hostname = Strings.domainNamePointerToHostName(DNS.getDefaultHost(
             getConf().get(QueryServices.QUERY_SERVER_DNS_INTERFACE_ATTRIB, "default"),
             getConf().get(QueryServices.QUERY_SERVER_DNS_NAMESERVER_ATTRIB, "default")));
@@ -218,12 +210,7 @@ public final class QueryServer extends Configured implements Tool, Runnable {
 
       // Enable SPNEGO and Impersonation when using Kerberos
       if (isKerberos) {
-        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-        LOG.debug("Current user is " + ugi);
-        if (!ugi.hasKerberosCredentials()) {
-          ugi = UserGroupInformation.getLoginUser();
-          LOG.debug("Current user does not have Kerberos credentials, using instead " + ugi);
-        }
+        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
 
         // Make sure the proxyuser configuration is up to date
         ProxyUsers.refreshSuperUserGroupsConfiguration(getConf());
@@ -241,9 +228,7 @@ public final class QueryServer extends Configured implements Tool, Runnable {
         builder.withSpnego(ugi.getUserName(), additionalAllowedRealms)
             .withAutomaticLogin(keytab)
             .withImpersonation(new PhoenixDoAsCallback(ugi, getConf()));
-
       }
-      setRemoteUserExtractorIfNecessary(builder, getConf());
 
       // Build and start the HttpServer
       server = builder.build();
@@ -258,10 +243,6 @@ public final class QueryServer extends Configured implements Tool, Runnable {
     }
   }
 
-  public synchronized void stop() {
-    server.stop();
-  }
-
   /**
    * Parses the serialization method from the configuration.
    *
@@ -292,56 +273,6 @@ public final class QueryServer extends Configured implements Tool, Runnable {
     }
   }
 
-  // add remoteUserExtractor to builder if enabled
-  @VisibleForTesting
-  public void setRemoteUserExtractorIfNecessary(HttpServer.Builder builder, Configuration conf) {
-    if (conf.getBoolean(QueryServices.QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR_ATTRIB,
-            QueryServicesOptions.DEFAULT_QUERY_SERVER_WITH_REMOTEUSEREXTRACTOR)) {
-      builder.withRemoteUserExtractor(new PhoenixRemoteUserExtractor(conf));
-    }
-  }
-
-  /**
-   * Extract the end user the right way: prefer the configured query parameter, falling back to the HTTP request's remote user.
-   */
-
-  static class PhoenixRemoteUserExtractor implements RemoteUserExtractor{
-    private final HttpQueryStringParameterRemoteUserExtractor paramRemoteUserExtractor;
-    private final HttpRequestRemoteUserExtractor requestRemoteUserExtractor;
-    private final String userExtractParam;
-
-    public PhoenixRemoteUserExtractor(Configuration conf) {
-      this.requestRemoteUserExtractor = new HttpRequestRemoteUserExtractor();
-      this.userExtractParam = conf.get(QueryServices.QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM,
-              QueryServicesOptions.DEFAULT_QUERY_SERVER_REMOTEUSEREXTRACTOR_PARAM);
-      this.paramRemoteUserExtractor = new HttpQueryStringParameterRemoteUserExtractor(userExtractParam);
-    }
-
-    @Override
-    public String extract(HttpServletRequest request) throws RemoteUserExtractionException {
-      if (request.getParameter(userExtractParam) != null) {
-        String extractedUser = paramRemoteUserExtractor.extract(request);
-        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(request.getRemoteUser());
-        UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(extractedUser, ugi);
-
-        // Check if this user is allowed to be impersonated.
-        // Will throw AuthorizationException if the impersonation as this user is not allowed
-        try {
-          ProxyUsers.authorize(proxyUser, request.getRemoteAddr());
-          return extractedUser;
-        } catch (AuthorizationException e) {
-          throw new RemoteUserExtractionException(e.getMessage(), e);
-        }
-      } else {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("The parameter (" + userExtractParam + ") used to extract the remote user doesn't exist in the request.");
-        }
-        return requestRemoteUserExtractor.extract(request);
-      }
-
-    }
-  }
-
   /**
    * Callback to run the Avatica server action as the remote (proxy) user instead of the server.
    */
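
The core of the removed PhoenixRemoteUserExtractor is Hadoop's proxy-user authorization. A minimal sketch of that check in isolation; the class and method names here are illustrative, but the ProxyUsers and UserGroupInformation calls are the ones the deleted code used:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.security.authorize.AuthorizationException;
    import org.apache.hadoop.security.authorize.ProxyUsers;

    public class ProxyAuthSketch {
        /** Returns normally iff realUser may impersonate effectiveUser from remoteAddr. */
        public static void checkImpersonation(Configuration conf, String realUser,
                String effectiveUser, String remoteAddr) throws AuthorizationException {
            // Load hadoop.proxyuser.<realUser>.{groups,hosts} into the superuser cache.
            ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
            UserGroupInformation real = UserGroupInformation.createRemoteUser(realUser);
            UserGroupInformation proxy = UserGroupInformation.createProxyUser(effectiveUser, real);
            // Throws AuthorizationException when impersonation is not permitted.
            ProxyUsers.authorize(proxy, remoteAddr);
        }
    }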

http://git-wip-us.apache.org/repos/asf/phoenix/blob/c5ae3477/phoenix-queryserver/src/test/java/org/apache/phoenix/queryserver/server/PhoenixRemoteUserExtractorTest.java
----------------------------------------------------------------------
diff --git a/phoenix-queryserver/src/test/java/org/apache/phoenix/queryserver/server/PhoenixRemoteUserExtractorTest.java b/phoenix-queryserver/src/test/java/org/apache/phoenix/queryserver/server/PhoenixRemoteUserExtractorTest.java
deleted file mode 100644
index 9351989..0000000
--- a/phoenix-queryserver/src/test/java/org/apache/phoenix/queryserver/server/PhoenixRemoteUserExtractorTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to you under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.phoenix.queryserver.server;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import org.apache.calcite.avatica.server.HttpServer;
-import org.apache.calcite.avatica.server.RemoteUserExtractionException;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.phoenix.queryserver.server.QueryServer.PhoenixRemoteUserExtractor;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletRequest;
-
-/**
- * Tests for the RemoteUserExtractor method that Avatica provides for Phoenix to implement.
- */
-public class PhoenixRemoteUserExtractorTest {
-  private static final Logger LOG = LoggerFactory.getLogger(PhoenixRemoteUserExtractorTest.class);
-
-  @Test
-  public void testWithRemoteUserExtractorSuccess() {
-    HttpServletRequest request = mock(HttpServletRequest.class);
-    when(request.getRemoteUser()).thenReturn("proxyserver");
-    when(request.getParameter("doAs")).thenReturn("enduser");
-    when(request.getRemoteAddr()).thenReturn("localhost:1234");
-
-    Configuration conf = new Configuration(false);
-    conf.set("hadoop.proxyuser.proxyserver.groups", "*");
-    conf.set("hadoop.proxyuser.proxyserver.hosts", "*");
-    conf.set("phoenix.queryserver.withRemoteUserExtractor", "true");
-    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-
-    PhoenixRemoteUserExtractor extractor = new PhoenixRemoteUserExtractor(conf);
-    try {
-      assertEquals("enduser", extractor.extract(request));
-    } catch (RemoteUserExtractionException e) {
-      LOG.info(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testNoRemoteUserExtractorParam() {
-    HttpServletRequest request = mock(HttpServletRequest.class);
-    when(request.getRemoteUser()).thenReturn("proxyserver");
-    when(request.getRemoteAddr()).thenReturn("localhost:1234");
-
-    Configuration conf = new Configuration(false);
-    conf.set("hadoop.proxyuser.proxyserver.groups", "*");
-    conf.set("hadoop.proxyuser.proxyserver.hosts", "*");
-    conf.set("phoenix.queryserver.withRemoteUserExtractor", "true");
-    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-
-    PhoenixRemoteUserExtractor extractor = new PhoenixRemoteUserExtractor(conf);
-    try {
-      assertEquals("proxyserver", extractor.extract(request));
-    } catch (RemoteUserExtractionException e) {
-      LOG.info(e.getMessage());
-    }
-  }
-
-  @Test
-  public void testDoNotUseRemoteUserExtractor() {
-
-    HttpServer.Builder builder = mock(HttpServer.Builder.class);
-    Configuration conf = new Configuration(false);
-    QueryServer queryServer = new QueryServer();
-    queryServer.setRemoteUserExtractorIfNecessary(builder, conf);
-    verify(builder, never()).withRemoteUserExtractor(any(PhoenixRemoteUserExtractor.class));
-  }
-
-  @Test
-  public void testUseRemoteUserExtractor() {
-
-    HttpServer.Builder builder = mock(HttpServer.Builder.class);
-    Configuration conf = new Configuration(false);
-    conf.set("phoenix.queryserver.withRemoteUserExtractor", "true");
-    QueryServer queryServer = new QueryServer();
-    queryServer.setRemoteUserExtractorIfNecessary(builder, conf);
-    verify(builder).withRemoteUserExtractor(any(PhoenixRemoteUserExtractor.class));
-  }
-
-}