Posted to commits@phoenix.apache.org by st...@apache.org on 2020/11/25 14:23:59 UTC

[phoenix-connectors] branch master updated: PHOENIX-6180 Investigate flaky tests in Phoenix Connectors

This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-connectors.git


The following commit(s) were added to refs/heads/master by this push:
     new ad05445  PHOENIX-6180 Investigate flaky tests in Phoenix Connectors
ad05445 is described below

commit ad05445501992766b45d76533332ef3d9257c238
Author: Istvan Toth <st...@apache.org>
AuthorDate: Wed Nov 18 10:01:21 2020 +0100

    PHOENIX-6180 Investigate flaky tests in Phoenix Connectors
    
    make HivePhoenixStoreIT abstract
    start only one HBase minicluster for Hive tests
    do not run Hive tests in parallel
    set separate DFS work dir for Hive minicluster
    restore GitHub workflows test
---
 .github/workflows/maven.yml                        |  36 +++++
 phoenix-hive-base/phoenix5-hive/pom.xml            |  19 +--
 phoenix-hive-base/pom.xml                          |  35 ++++-
 .../phoenix/hive/BaseHivePhoenixStoreIT.java       |  87 ++++++------
 .../org/apache/phoenix/hive/HiveMapReduceIT.java   |   4 -
 .../apache/phoenix/hive/HivePhoenixStoreIT.java    | 153 ++++++++++++---------
 .../src/test/resources/hbase-site.xml              |  17 +++
 phoenix-kafka-base/pom.xml                         |  35 ++---
 phoenix-spark-base/phoenix5-spark/pom.xml          |   1 -
 phoenix4-connectors-assembly/pom.xml               |  75 ++++++----
 phoenix5-connectors-assembly/pom.xml               |  64 +++++----
 pom.xml                                            |  36 ++---
 12 files changed, 329 insertions(+), 233 deletions(-)
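
Taken together, the changes below collapse the Hive ITs onto the single HBase minicluster that Phoenix's BaseTest already manages, and leave only the choice of Hive execution engine to the concrete subclasses. A minimal sketch of the resulting layout, using the class names from this diff (HiveTezIT is assumed to exist as the Tez-flavoured counterpart and is not touched by this commit; the sketch depends on the Phoenix test classes referenced in the diff):

import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.phoenix.end2end.ParallelStatsDisabledTest;
import org.apache.phoenix.hive.HiveTestUtil;
import org.apache.phoenix.query.BaseTest;

// Owns the shared setup: the Phoenix/HBase minicluster via setUpTestDriver(), then the
// Hive minicluster with its own DFS work directory (details in the diff below).
@Category(ParallelStatsDisabledTest.class)
class BaseHivePhoenixStoreIT extends BaseTest {
    public static void setup(HiveTestUtil.MiniClusterType clusterType) throws Exception {
        // start Phoenix/HBase once, then bring up Hive for the requested engine
    }
}

// Holds only the test methods; abstract, so it is never scheduled on its own.
abstract class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
    @Test
    public void simpleTest() throws Exception {
        // create a Hive-backed Phoenix table, insert a row, verify it through Phoenix
    }
}

// Concrete runner that picks the MapReduce engine; a Tez runner would call
// setup() with the corresponding MiniClusterType instead.
public class HiveMapReduceIT extends HivePhoenixStoreIT {
    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
        setup(HiveTestUtil.MiniClusterType.mr);
    }
}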

diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
new file mode 100644
index 0000000..5839864
--- /dev/null
+++ b/.github/workflows/maven.yml
@@ -0,0 +1,36 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+name: phoenix-connectors preCommit Build
+
+on:
+  pull_request:
+
+jobs:
+  build:
+    runs-on: ubuntu-18.04
+    steps:
+    - uses: actions/checkout@v1
+    - name: Set up JDK 1.8
+      uses: actions/setup-java@v1
+      with:
+        java-version: 1.8
+    - name: Build
+      run: mvn -B clean install -DskipTests
+    # The build doesn't seem to pick up the result of the install from above, so just re-compile
+    # and the Maven reactor will find it just fine.
+    - name: Test
+      run: mvn -B verify
diff --git a/phoenix-hive-base/phoenix5-hive/pom.xml b/phoenix-hive-base/phoenix5-hive/pom.xml
index 9167f18..22566e9 100644
--- a/phoenix-hive-base/phoenix5-hive/pom.xml
+++ b/phoenix-hive-base/phoenix5-hive/pom.xml
@@ -34,13 +34,10 @@
 
   <properties>
     <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
-    <netty.version>4.1.47.Final</netty.version>
     <phoenix.version>${phoenix-five.version}</phoenix.version>
     <hbase.version>${hbase-two.version}</hbase.version>
     <hadoop.version>${hadoop-three.version}</hadoop.version>
-    <avatica.version>1.12.0</avatica.version>
     <hive.version>${hive3.version}</hive.version>
-    <jetty.version>9.3.8.v20160314</jetty.version>
     <jdk.version>1.8</jdk.version>
     <phoenix.main.version>5</phoenix.main.version>
   </properties>
@@ -70,12 +67,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>io.netty</groupId>
-          <artifactId>netty</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.logging.log4j</groupId>
@@ -83,11 +74,11 @@
       <version>2.14.0</version>
       <scope>test</scope>
     </dependency>
-           <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-minicluster</artifactId>
-            <scope>test</scope>
-        </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
   <build>
     <plugins>
diff --git a/phoenix-hive-base/pom.xml b/phoenix-hive-base/pom.xml
index f4066ed..dc1c72f 100644
--- a/phoenix-hive-base/pom.xml
+++ b/phoenix-hive-base/pom.xml
@@ -40,10 +40,7 @@
 
   <properties>
     <test.tmp.dir>${project.build.directory}/tmp</test.tmp.dir>
-    <netty.version>4.1.47.Final</netty.version>
-    <avatica.version>1.8.0</avatica.version>
     <tez.version>0.9.1</tez.version>
-    <jetty.version>8.1.7.v20120910</jetty.version>
     <jdk.version>1.8</jdk.version>
     <commons-lang3.version>3.9</commons-lang3.version>
   </properties>
@@ -247,15 +244,21 @@
               <ignoredUnusedDeclaredDependency>
                 org.apache.tez:tez-dag
               </ignoredUnusedDeclaredDependency>
-              <!-- These are added for phoenix5 only -->
               <ignoredUnusedDeclaredDependency>
                 org.apache.logging.log4j:log4j-core
               </ignoredUnusedDeclaredDependency>
               <ignoredUnusedDeclaredDependency>
                 org.apache.hadoop:hadoop-minicluster
               </ignoredUnusedDeclaredDependency>
+              <ignoredUnusedDeclaredDependency>
+                org.apache.hadoop:hadoop-hdfs
+              </ignoredUnusedDeclaredDependency>
             </ignoredUnusedDeclaredDependencies>
             <ignoredUsedUndeclaredDependencies>
+              <!-- I couldn't find it referenced anywhere in the phoenix-hive codebase -->
+              <ignoredUnusedDeclaredDependency>
+                com.google.code.findbugs:jsr305
+              </ignoredUnusedDeclaredDependency>
               <!-- This one is real, but I don't want to force users to specify both
                 HBase AND Zookeeper versions so we'll just pretend that it's declared properly, 
                 and take whatever version we get -->
@@ -297,6 +300,30 @@
           </executions>
         </plugin>
         <plugin>
+          <artifactId>maven-failsafe-plugin</artifactId>
+          <configuration>
+            <!-- The HBase + Hive minicluster setup seems very fragile, so
+            we make sure to run everything as serially as possible -->
+            <forkCount>1</forkCount>
+            <reuseForks>false</reuseForks>
+          </configuration>
+          <executions>
+            <execution>
+              <id>ParallelStatsDisabledTest</id>
+              <configuration>
+                <forkCount>1</forkCount>
+                <reuseForks>false</reuseForks>
+                <argLine>-Xmx3000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
+                <groups>org.apache.phoenix.end2end.ParallelStatsDisabledTest</groups>
+              </configuration>
+              <goals>
+                <goal>integration-test</goal>
+                <goal>verify</goal>
+              </goals>
+            </execution>
+          </executions>
+        </plugin>
+        <plugin>
           <artifactId>maven-resources-plugin</artifactId>
           <executions>
             <execution>
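
The new failsafe execution above only picks up classes that carry the matching JUnit category, which is why BaseHivePhoenixStoreIT in the next hunk gains @Category(ParallelStatsDisabledTest.class). A minimal, self-contained illustration of how the <groups> filter and the annotation meet (the real marker lives in org.apache.phoenix.end2end; a hypothetical stand-in is used here so the snippet compiles on its own):

import org.junit.Test;
import org.junit.experimental.categories.Category;

// Stand-in for org.apache.phoenix.end2end.ParallelStatsDisabledTest. JUnit 4
// categories are plain class literals, so any class or interface works as a marker.
interface ParallelStatsDisabledTest {
}

// Because the class-level category matches the configured <groups> value, this test
// is run by the ParallelStatsDisabledTest execution -- with forkCount=1 and
// reuseForks=false, i.e. serially in its own JVM.
@Category(ParallelStatsDisabledTest.class)
public class ExampleSerialIT {
    @Test
    public void runsInTheSingleForkedJvm() {
    }
}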
diff --git a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
index 66e016b..caf8872 100644
--- a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+++ b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
@@ -25,60 +25,81 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT;
+import org.apache.phoenix.query.BaseTest;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QTestProcessExecResult;
+import org.apache.phoenix.end2end.ParallelStatsDisabledTest;
+import org.apache.phoenix.execute.UpsertSelectOverlappingBatchesIT.SlowBatchRegionObserver;
 import org.apache.phoenix.jdbc.PhoenixDriver;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.TestUtil;
 import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.experimental.categories.Category;
 
 import org.apache.phoenix.thirdparty.com.google.common.base.Throwables;
+import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
 
 import java.io.File;
 import java.io.IOException;
 import java.sql.*;
 import java.util.Properties;
+import java.util.HashMap;
+import java.util.Map;
 
+import javax.annotation.concurrent.NotThreadSafe;
+
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 /**
  * Base class for all Hive Phoenix integration tests that may be run with Tez or MR mini cluster
  */
-public class BaseHivePhoenixStoreIT extends BaseHBaseManagedTimeIT {
+@NotThreadSafe
+@Category(ParallelStatsDisabledTest.class)
+public class BaseHivePhoenixStoreIT extends BaseTest {
 
     private static final Log LOG = LogFactory.getLog(BaseHivePhoenixStoreIT.class);
-    protected static HBaseTestingUtility hbaseTestUtil;
-    protected static MiniHBaseCluster hbaseCluster;
-    private static String zkQuorum;
-    protected static Connection conn;
-    private static Configuration conf;
     protected static HiveTestUtil qt;
     protected static String hiveOutputDir;
     protected static String hiveLogDir;
 
-    public static void setup(HiveTestUtil.MiniClusterType clusterType)throws Exception {
+    public static void setup(HiveTestUtil.MiniClusterType clusterType) throws Exception {
+        System.clearProperty("test.build.data");
+
+        // Set up HBase minicluster + Phoenix first
+        Map<String, String> serverProps = Maps.newHashMapWithExpectedSize(3);
+        serverProps.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
+        serverProps.put("hive.metastore.schema.verification","false");
+        setUpTestDriver(new ReadOnlyProps(serverProps.entrySet().iterator()));
+
         String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
         if (null != hadoopConfDir && !hadoopConfDir.isEmpty()) {
           LOG.warn("WARNING: HADOOP_CONF_DIR is set in the environment which may cause "
               + "issues with test execution via MiniDFSCluster");
         }
-        hbaseTestUtil = new HBaseTestingUtility();
-        conf = hbaseTestUtil.getConfiguration();
-        setUpConfigForMiniCluster(conf);
-        conf.set(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
-        conf.set("hive.metastore.schema.verification","false");
-        hiveOutputDir = new Path(hbaseTestUtil.getDataTestDir(), "hive_output").toString();
+
+        // Setup Hive mini Server
+        hiveOutputDir = new Path(utility.getDataTestDir(), "hive_output").toString();
         File outputDir = new File(hiveOutputDir);
         outputDir.mkdirs();
-        hiveLogDir = new Path(hbaseTestUtil.getDataTestDir(), "hive_log").toString();
+
+        hiveLogDir = new Path(utility.getDataTestDir(), "hive_log").toString();
         File logDir = new File(hiveLogDir);
         logDir.mkdirs();
-        // Setup Hive mini Server
-        Path testRoot = hbaseTestUtil.getDataTestDir();
+
+        Path testRoot = utility.getDataTestDir();
+        String hiveBuildDataDir = new Path(utility.getDataTestDir(), "hive/build/data/").toString();
+        File buildDataDir = new File(hiveBuildDataDir);
+        buildDataDir.mkdirs();
+
+        //Separate data dir for the Hive test cluster's DFS, so that it doesn't nuke HBase's DFS
+        System.setProperty("test.build.data", hiveBuildDataDir.toString());
+
         System.setProperty("test.tmp.dir", testRoot.toString());
         System.setProperty("test.warehouse.dir", (new Path(testRoot, "warehouse")).toString());
         System.setProperty(HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString(), "false");
@@ -92,19 +113,12 @@ public class BaseHivePhoenixStoreIT extends BaseHBaseManagedTimeIT {
             fail("Unexpected exception in setup"+Throwables.getStackTraceAsString(e));
         }
 
-        //Start HBase cluster
-        hbaseCluster = hbaseTestUtil.startMiniCluster(1);
-        MiniDFSCluster x = hbaseTestUtil.getDFSCluster();
-        Class.forName(PhoenixDriver.class.getName());
-        zkQuorum = "localhost:" + hbaseTestUtil.getZkCluster().getClientPort();
-        Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
-        props.put(QueryServices.DROP_METADATA_ATTRIB, Boolean.toString(true));
-        conn = DriverManager.getConnection(PhoenixRuntime.JDBC_PROTOCOL +
-                PhoenixRuntime.JDBC_PROTOCOL_SEPARATOR + zkQuorum, props);
-        // Setup Hive Output Folder
-
-        Statement stmt = conn.createStatement();
-        stmt.execute("create table t(a integer primary key,b varchar)");
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                Statement stmt = conn.createStatement()) {
+            stmt.execute("create table t(a integer primary key,b varchar)");
+        }
+
     }
 
     protected void runTest(String fname, String fpath) throws Exception {
@@ -147,19 +161,6 @@ public class BaseHivePhoenixStoreIT extends BaseHBaseManagedTimeIT {
 
     @AfterClass
     public static synchronized void tearDownAfterClass() throws Exception {
-        try {
-            conn.close();
-        } finally {
-            try {
-                PhoenixDriver.INSTANCE.close();
-            } finally {
-                try {
-                    DriverManager.deregisterDriver(PhoenixDriver.INSTANCE);
-                } finally {
-                    hbaseTestUtil.shutdownMiniCluster();
-                }
-            }
-        }
         // Shutdowns down the filesystem -- do this after stopping HBase.
         if (qt != null) {
           try {
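
The test.build.data handling above is what keeps the two DFS instances apart: a MiniDFSCluster started with default settings derives its storage root from that system property, so clearing it before BaseTest brings up HBase's cluster, and then pointing it at a dedicated hive/build/data directory, stops the Hive minicluster's DFS from formatting the directory the HBase one is already using. A stand-alone sketch of that failure mode and its fix (the paths are illustrative, and the property-based default is an assumption about MiniDFSCluster rather than something this patch states):

import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class SeparateDfsDirsSketch {
    public static void main(String[] args) throws Exception {
        // First cluster: stands in for the DFS that HBaseTestingUtility starts for HBase.
        // With test.build.data unset it falls back to the built-in default location.
        System.clearProperty("test.build.data");
        MiniDFSCluster hbaseDfs = new MiniDFSCluster.Builder(new Configuration()).build();

        // Second cluster: stands in for the Hive minicluster's DFS. Give it its own root
        // first, otherwise it would re-format the directory the first cluster is using.
        File hiveData = new File("target/hive/build/data");
        hiveData.mkdirs();
        System.setProperty("test.build.data", hiveData.getAbsolutePath());
        MiniDFSCluster hiveDfs = new MiniDFSCluster.Builder(new Configuration()).build();

        hiveDfs.shutdown();
        hbaseDfs.shutdown();
    }
}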
diff --git a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
index 7b6fbbb..f10c138 100644
--- a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
+++ b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HiveMapReduceIT.java
@@ -28,10 +28,6 @@ public class HiveMapReduceIT extends HivePhoenixStoreIT {
 
     @BeforeClass
     public static void setUpBeforeClass() throws Exception {
-        final String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
-        if (hadoopConfDir != null && hadoopConfDir.length() != 0) {
-            fail("HADOOP_CONF_DIR is non-empty in the current shell environment which will very likely cause this test to fail.");
-        }
         setup(HiveTestUtil.MiniClusterType.mr);
     }
 
diff --git a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
index dea79d6..bdd10e5 100644
--- a/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
+++ b/phoenix-hive-base/src/it/java/org/apache/phoenix/hive/HivePhoenixStoreIT.java
@@ -18,20 +18,25 @@
 package org.apache.phoenix.hive;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.phoenix.util.PropertiesUtil;
 import org.apache.phoenix.util.StringUtil;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import java.sql.Connection;
+import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.util.Properties;
 
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
 import static org.junit.Assert.assertTrue;
 
 /**
  * Test methods only. All supporting methods should be placed to BaseHivePhoenixStoreIT
  */
 @Ignore("This class contains only test methods and should not be executed directly")
-public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
+public abstract class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
 
     /**
      * Create a table with two column, insert 1 row, check that phoenix table is created and
@@ -42,7 +47,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void simpleTest() throws Exception {
         String testName = "simpleTest";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
         StringBuilder sb = new StringBuilder();
@@ -53,21 +58,25 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id');");
         sb.append("INSERT INTO TABLE phoenix_table" + HiveTestUtil.CRLF +
                 "VALUES ('10', '1000');" + HiveTestUtil.CRLF);
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
 
         String phoenixQuery = "SELECT * FROM phoenix_table";
-        PreparedStatement statement = conn.prepareStatement(phoenixQuery);
-        ResultSet rs = statement.executeQuery();
-        assert (rs.getMetaData().getColumnCount() == 2);
-        assertTrue(rs.next());
-        assert (rs.getString(1).equals("10"));
-        assert (rs.getString(2).equals("1000"));
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                PreparedStatement statement = conn.prepareStatement(phoenixQuery)) {
+            conn.setAutoCommit(true);
+            ResultSet rs = statement.executeQuery();
+            assert (rs.getMetaData().getColumnCount() == 2);
+            assertTrue(rs.next());
+            assert (rs.getString(1).equals("10"));
+            assert (rs.getString(2).equals("1000"));
+        }
     }
 
     /**
@@ -78,7 +87,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void simpleColumnMapTest() throws Exception {
         String testName = "cmTest";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
         StringBuilder sb = new StringBuilder();
@@ -90,23 +99,27 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.column.mapping' = 'id:C1, p1:c2, p2:C3'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id');");
         sb.append("INSERT INTO TABLE column_table" + HiveTestUtil.CRLF +
                 "VALUES ('1', '2', '3');" + HiveTestUtil.CRLF);
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
 
         String phoenixQuery = "SELECT C1, \"c2\", C3 FROM column_table";
-        PreparedStatement statement = conn.prepareStatement(phoenixQuery);
-        ResultSet rs = statement.executeQuery();
-        assert (rs.getMetaData().getColumnCount() == 3);
-        assertTrue(rs.next());
-        assert (rs.getString(1).equals("1"));
-        assert (rs.getString(2).equals("2"));
-        assert (rs.getString(3).equals("3"));
-
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                PreparedStatement statement = conn.prepareStatement(phoenixQuery)) {
+            conn.setAutoCommit(true);
+        
+            ResultSet rs = statement.executeQuery();
+            assert (rs.getMetaData().getColumnCount() == 3);
+            assertTrue(rs.next());
+            assert (rs.getString(1).equals("1"));
+            assert (rs.getString(2).equals("2"));
+            assert (rs.getString(3).equals("3"));
+        }
     }
 
 
@@ -118,7 +131,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void dataTypeTest() throws Exception {
         String testName = "dataTypeTest";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
         StringBuilder sb = new StringBuilder();
@@ -130,24 +143,28 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id');");
         sb.append("INSERT INTO TABLE phoenix_datatype" + HiveTestUtil.CRLF +
                 "VALUES (10, \"foodesc\", \"2013-01-05 01:01:01\", 200,2.0,-1);" + HiveTestUtil.CRLF);
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
 
         String phoenixQuery = "SELECT * FROM phoenix_datatype";
-        PreparedStatement statement = conn.prepareStatement(phoenixQuery);
-        ResultSet rs = statement.executeQuery();
-        assert (rs.getMetaData().getColumnCount() == 6);
-        while (rs.next()) {
-            assert (rs.getInt(1) == 10);
-            assert (rs.getString(2).equalsIgnoreCase("foodesc"));
-            assert (rs.getDouble(4) == 200);
-            assert (rs.getFloat(5) == 2.0);
-            assert (rs.getInt(6) == -1);
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                PreparedStatement statement = conn.prepareStatement(phoenixQuery)) {
+            conn.setAutoCommit(true);
+            ResultSet rs = statement.executeQuery();
+            assert (rs.getMetaData().getColumnCount() == 6);
+            while (rs.next()) {
+                assert (rs.getInt(1) == 10);
+                assert (rs.getString(2).equalsIgnoreCase("foodesc"));
+                assert (rs.getDouble(4) == 200);
+                assert (rs.getFloat(5) == 2.0);
+                assert (rs.getInt(6) == -1);
+            }
         }
     }
 
@@ -159,7 +176,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void MultiKey() throws Exception {
         String testName = "MultiKey";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveOutputDir, testName + ".out").toString());
         StringBuilder sb = new StringBuilder();
@@ -172,25 +189,29 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
         sb.append("INSERT INTO TABLE phoenix_MultiKey" + HiveTestUtil.CRLF +"VALUES (10, \'part2\',\'foodesc\',200,2.0,-1);" +
                 HiveTestUtil.CRLF);
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
 
         String phoenixQuery = "SELECT * FROM phoenix_MultiKey";
-        PreparedStatement statement = conn.prepareStatement(phoenixQuery);
-        ResultSet rs = statement.executeQuery();
-        assert (rs.getMetaData().getColumnCount() == 6);
-        while (rs.next()) {
-            assert (rs.getInt(1) == 10);
-            assert (rs.getString(2).equalsIgnoreCase("part2"));
-            assert (rs.getString(3).equalsIgnoreCase("foodesc"));
-            assert (rs.getDouble(4) == 200);
-            assert (rs.getFloat(5) == 2.0);
-            assert (rs.getInt(6) == -1);
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                PreparedStatement statement = conn.prepareStatement(phoenixQuery)) {
+            conn.setAutoCommit(true);
+            ResultSet rs = statement.executeQuery();
+            assert (rs.getMetaData().getColumnCount() == 6);
+            while (rs.next()) {
+                assert (rs.getInt(1) == 10);
+                assert (rs.getString(2).equalsIgnoreCase("part2"));
+                assert (rs.getString(3).equalsIgnoreCase("foodesc"));
+                assert (rs.getDouble(4) == 200);
+                assert (rs.getFloat(5) == 2.0);
+                assert (rs.getInt(6) == -1);
+            }
         }
     }
 
@@ -202,7 +223,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void testJoinNoColumnMaps() throws Exception {
         String testName = "testJoin";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
         createFile("#### A masked pattern was here ####\n10\tpart2\tfoodesc\t200.0\t2.0\t-1\t10\tpart2\tfoodesc\t200.0\t2.0\t-1\n",
                 new Path(hiveOutputDir, testName + ".out").toString());
@@ -216,7 +237,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
         sb.append("CREATE EXTERNAL TABLE joinTable2(ID int, ID2 String,description STRING," +
                 "db DOUBLE,fl FLOAT, us INT)" + HiveTestUtil.CRLF +
@@ -227,7 +248,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
 
         sb.append("INSERT INTO TABLE joinTable1" + HiveTestUtil.CRLF +"VALUES (5, \'part2\',\'foodesc\',200,2.0,-1);" + HiveTestUtil.CRLF);
@@ -239,7 +260,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
         sb.append("SELECT  * from joinTable1 A join joinTable2 B on A.id = B.id WHERE A.ID=10;" +
                 HiveTestUtil.CRLF);
 
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
     }
@@ -252,7 +273,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
     @Test
     public void testJoinColumnMaps() throws Exception {
         String testName = "testJoin";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile("#### A masked pattern was here ####\n10\t200.0\tpart2\n", new Path(hiveOutputDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
 
@@ -266,7 +287,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.column.mapping' = 'id:i1, id2:I2, db:db'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
         sb.append("CREATE EXTERNAL TABLE joinTable4(ID int, ID2 String,description STRING," +
@@ -278,7 +299,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.column.mapping' = 'id:i1, id2:I2, db:db'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id,id2');" + HiveTestUtil.CRLF);
 
@@ -291,26 +312,30 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
         sb.append("SELECT A.ID, a.db, B.ID2 from joinTable3 A join joinTable4 B on A.ID = B.ID WHERE A.ID=10;" +
                 HiveTestUtil.CRLF);
 
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
         //Test that Phoenix has correctly mapped columns. We are checking both, primary key and
         // regular columns mapped and not mapped
         String phoenixQuery = "SELECT \"i1\", \"I2\", \"db\" FROM joinTable3 where \"i1\" = 10 AND \"I2\" = 'part1' AND \"db\" = 200";
-        PreparedStatement statement = conn.prepareStatement(phoenixQuery);
-        ResultSet rs = statement.executeQuery();
-        assert (rs.getMetaData().getColumnCount() == 3);
-        while (rs.next()) {
-            assert (rs.getInt(1) == 10);
-            assert (rs.getString(2).equalsIgnoreCase("part1"));
-            assert (rs.getDouble(3) == 200);
+        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+        try (Connection conn = DriverManager.getConnection(getUrl(), props);
+                PreparedStatement statement = conn.prepareStatement(phoenixQuery)) {
+            conn.setAutoCommit(true);
+            ResultSet rs = statement.executeQuery();
+            assert (rs.getMetaData().getColumnCount() == 3);
+            while (rs.next()) {
+                assert (rs.getInt(1) == 10);
+                assert (rs.getString(2).equalsIgnoreCase("part1"));
+                assert (rs.getDouble(3) == 200);
+            }
         }
     }
 
     @Test
     public void testTimestampPredicate() throws Exception {
         String testName = "testTimeStampPredicate";
-        hbaseTestUtil.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
+        utility.getTestFileSystem().createNewFile(new Path(hiveLogDir, testName + ".out"));
         createFile("10\t2013-01-02 01:01:01.123456\n", new Path(hiveOutputDir, testName + ".out").toString());
         createFile(StringUtil.EMPTY_STRING, new Path(hiveLogDir, testName + ".out").toString());
 
@@ -323,7 +348,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
                 "   'phoenix.zookeeper.znode.parent'='/hbase'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.quorum'='localhost'," + HiveTestUtil.CRLF +
                 "   'phoenix.zookeeper.client.port'='" +
-                hbaseTestUtil.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
+                utility.getZkCluster().getClientPort() + "'," + HiveTestUtil.CRLF +
                 "   'phoenix.column.mapping' = 'id:ID, ts:TS'," + HiveTestUtil.CRLF +
                 "   'phoenix.rowkeys'='id');" + HiveTestUtil.CRLF);
         /*
@@ -333,7 +358,7 @@ public class HivePhoenixStoreIT extends BaseHivePhoenixStoreIT {
         sb.append("SELECT * from timeStampTable WHERE ts between '2012-01-02 01:01:01.123455' and " +
                 " '2015-01-02 12:01:02.123457789' AND id = 10;" + HiveTestUtil.CRLF);
 
-        String fullPath = new Path(hbaseTestUtil.getDataTestDir(), testName).toString();
+        String fullPath = new Path(utility.getDataTestDir(), testName).toString();
         createFile(sb.toString(), fullPath);
         runTest(testName, fullPath);
     }
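
With the shared static conn gone from the base class, every check above obtains a short-lived connection against BaseTest.getUrl() and lets try-with-resources close it. Distilled into a hypothetical helper (switched to JUnit assertions, since the bare assert statements in these tests only fire when the JVM runs with -enableassertions):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Properties;

import org.apache.phoenix.util.PropertiesUtil;

import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

final class PhoenixVerify {
    // Opens a fresh connection to the shared minicluster, runs the query, and checks
    // that the first row matches the expected column values.
    static void assertSingleRow(String url, String query, String... expected) throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(url, props);
                PreparedStatement statement = conn.prepareStatement(query)) {
            ResultSet rs = statement.executeQuery();
            assertEquals(expected.length, rs.getMetaData().getColumnCount());
            assertTrue(rs.next());
            for (int i = 0; i < expected.length; i++) {
                assertEquals(expected[i], rs.getString(i + 1));
            }
        }
    }
}

A test such as simpleTest could then verify its row with PhoenixVerify.assertSingleRow(getUrl(), "SELECT * FROM phoenix_table", "10", "1000").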
diff --git a/phoenix-hive-base/src/test/resources/hbase-site.xml b/phoenix-hive-base/src/test/resources/hbase-site.xml
index d185eb7..8ebb99e 100644
--- a/phoenix-hive-base/src/test/resources/hbase-site.xml
+++ b/phoenix-hive-base/src/test/resources/hbase-site.xml
@@ -1,3 +1,20 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
 <configuration>
   <property>
     <name>hbase.wal.provider</name>
diff --git a/phoenix-kafka-base/pom.xml b/phoenix-kafka-base/pom.xml
index 8282506..7b61e60 100644
--- a/phoenix-kafka-base/pom.xml
+++ b/phoenix-kafka-base/pom.xml
@@ -87,7 +87,7 @@
       <groupId>com.101tec</groupId>
       <artifactId>zkclient</artifactId>
       <version>${com-101tek-zkclient.version}</version>
-    </dependency>    
+    </dependency>
     <!-- Test dependencies -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
@@ -203,27 +203,18 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-shade-plugin</artifactId>
-          <executions>
-            <execution>
-              <phase>package</phase>
-              <goals>
-                <goal>shade</goal>
-              </goals>
-              <configuration>
-                <finalName>phoenix${phoenix.main.version}-kafka-${project.version}-minimal</finalName>
-                <shadedArtifactAttached>false</shadedArtifactAttached>
-                <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
-                <shadeTestJar>false</shadeTestJar>
-                <artifactSet>
-                  <includes>
-                    <include>org.apache.phoenix:phoenix4-kafka</include>
-                    <include>org.apache.kafka:kafka-clients</include>
-                    <include>org.apache.phoenix:phoenix4-flume</include>
-                  </includes>
-                </artifactSet>
-              </configuration>
-            </execution>
-          </executions>
+          <configuration>
+            <finalName>phoenix${phoenix.main.version}-kafka-${project.version}-minimal</finalName>
+            <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+            <shadeTestJar>false</shadeTestJar>
+            <artifactSet>
+              <includes>
+                <include>org.apache.phoenix:phoenix${phoenix.main.version}-kafka</include>
+                <include>org.apache.kafka:kafka-clients</include>
+                <include>org.apache.phoenix:phoenix${phoenix.main.version}-flume</include>
+              </includes>
+            </artifactSet>
+          </configuration>
         </plugin>
       </plugins>
     </pluginManagement>
diff --git a/phoenix-spark-base/phoenix5-spark/pom.xml b/phoenix-spark-base/phoenix5-spark/pom.xml
index faa8bc8..3fd1a9e 100644
--- a/phoenix-spark-base/phoenix5-spark/pom.xml
+++ b/phoenix-spark-base/phoenix5-spark/pom.xml
@@ -38,7 +38,6 @@
     <hbase.version>${hbase-two.version}</hbase.version>
     <hadoop.version>${hadoop-three.version}</hadoop.version>
     <jdk.version>1.8</jdk.version>
-    <jetty.version>9.3.19.v20170502</jetty.version>
     <fasterxml.jackson.version>2.10.0</fasterxml.jackson.version>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
     <phoenix.main.version>5</phoenix.main.version>
diff --git a/phoenix4-connectors-assembly/pom.xml b/phoenix4-connectors-assembly/pom.xml
index 5388849..4da2d8b 100644
--- a/phoenix4-connectors-assembly/pom.xml
+++ b/phoenix4-connectors-assembly/pom.xml
@@ -32,36 +32,38 @@
   <packaging>pom</packaging>
   <name>Phoenix 4 Connectors Distribution Assembly</name>
 
+ <dependencies>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix4-flume</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix4-hive</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix4-kafka</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix4-pig</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix4-spark</artifactId>
+    </dependency>
+  </dependencies>
+
   <build>
     <plugins>
       <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>package-phoenix4-to-tar</id>
-            <phase>package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <skipAssembly>${skip-phoenix4}</skipAssembly>
-              <descriptors>
-                <descriptor>src/build/package-phoenix4-connectors-to-tar-all.xml</descriptor>
-              </descriptors>
-              <finalName>phoenix4-connectors-${project.version}</finalName>
-              <tarLongFileMode>posix</tarLongFileMode>
-              <appendAssemblyId>false</appendAssemblyId>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
         <artifactId>exec-maven-plugin</artifactId>
         <groupId>org.codehaus.mojo</groupId>
         <executions>
           <execution>
             <id>flume without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -81,7 +83,7 @@
           </execution>
           <execution>
             <id>hive without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -101,7 +103,7 @@
           </execution>
           <execution>
             <id>pig without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -121,7 +123,7 @@
           </execution>
           <execution>
             <id>spark without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -141,7 +143,7 @@
           </execution>
           <execution>
             <id>kafka without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -161,6 +163,27 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>package-phoenix4-to-tar</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <configuration>
+              <skipAssembly>${skip-phoenix4}</skipAssembly>
+              <descriptors>
+                <descriptor>src/build/package-phoenix4-connectors-to-tar-all.xml</descriptor>
+              </descriptors>
+              <finalName>phoenix4-connectors-${project.version}</finalName>
+              <tarLongFileMode>posix</tarLongFileMode>
+              <appendAssemblyId>false</appendAssemblyId>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>
diff --git a/phoenix5-connectors-assembly/pom.xml b/phoenix5-connectors-assembly/pom.xml
index 6bd7b1e..0b06458 100644
--- a/phoenix5-connectors-assembly/pom.xml
+++ b/phoenix5-connectors-assembly/pom.xml
@@ -35,52 +35,35 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix4-flume</artifactId>
+      <artifactId>phoenix5-flume</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix4-pig</artifactId>
+      <artifactId>phoenix5-hive</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix4-spark</artifactId>
+      <artifactId>phoenix5-kafka</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.phoenix</groupId>
-      <artifactId>phoenix4-hive</artifactId>
+      <artifactId>phoenix5-pig</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>phoenix5-spark</artifactId>
     </dependency>
   </dependencies>
 
   <build>
     <plugins>
       <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>package-phoenix5-to-tar</id>
-            <phase>package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-            <configuration>
-              <skipAssembly>${skip-phoenix5}</skipAssembly>
-              <descriptors>
-                <descriptor>src/build/package-phoenix5-connectors-to-tar-all.xml</descriptor>
-              </descriptors>
-              <finalName>phoenix5-connectors-${project.version}</finalName>
-              <tarLongFileMode>posix</tarLongFileMode>
-              <appendAssemblyId>false</appendAssemblyId>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
         <artifactId>exec-maven-plugin</artifactId>
         <groupId>org.codehaus.mojo</groupId>
         <executions>
           <execution>
             <id>flume without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -100,7 +83,7 @@
           </execution>
           <execution>
             <id>hive without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -120,7 +103,7 @@
           </execution>
           <execution>
             <id>pig without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -140,7 +123,7 @@
           </execution>
           <execution>
             <id>spark without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -160,7 +143,7 @@
           </execution>
           <execution>
             <id>kafka without version</id>
-            <phase>compile</phase>
+            <phase>package</phase>
             <goals>
               <goal>exec</goal>
             </goals>
@@ -180,6 +163,27 @@
           </execution>
         </executions>
       </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>package-phoenix5-to-tar</id>
+            <phase>package</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+            <configuration>
+              <skipAssembly>${skip-phoenix5}</skipAssembly>
+              <descriptors>
+                <descriptor>src/build/package-phoenix5-connectors-to-tar-all.xml</descriptor>
+              </descriptors>
+              <finalName>phoenix5-connectors-${project.version}</finalName>
+              <tarLongFileMode>posix</tarLongFileMode>
+              <appendAssemblyId>false</appendAssemblyId>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>
diff --git a/pom.xml b/pom.xml
index 169a585..3243b90 100644
--- a/pom.xml
+++ b/pom.xml
@@ -256,31 +256,23 @@
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-failsafe-plugin</artifactId>
           <version>${maven-failsafe-plugin.version}</version>
+          <!-- Common settings for all executions -->
+          <configuration>
+            <encoding>UTF-8</encoding>
+            <forkCount>${numForkedIT}</forkCount>
+            <runOrder>alphabetical</runOrder>
+            <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
+            <shutdown>kill</shutdown>
+            <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
+            <trimStackTrace>false</trimStackTrace>
+          </configuration>
           <executions>
             <execution>
               <id>ParallelStatsDisabledTest</id>
               <configuration>
-                <encoding>UTF-8</encoding>
-                <forkCount>${numForkedIT}</forkCount>
-                <runOrder>alphabetical</runOrder>
                 <reuseForks>true</reuseForks>
-                <runOrder>alphabetical</runOrder>
-                <!--parallel>methods</parallel>
-                <threadCount>20</threadCount-->
-                <!-- We're intermittantly hitting this assertion when running in parallel:
-                     Caused by: java.lang.AssertionError: we should never remove a different context
-	                 at org.apache.hadoop.hbase.regionserver.HRegion$RowLockContext.cleanUp(HRegion.java:5206)
-	                 at org.apache.hadoop.hbase.regionserver.HRegion$RowLockImpl.release(HRegion.java:5246)
-	                 at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2898)
-	                 at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.doGetTable(MetaDataEndpointImpl.java:2835)
-	                 at org.apache.phoenix.coprocessor.MetaDataEndpointImpl.getTable(MetaDataEndpointImpl.java:490) -->
-		<!--enableAssertions>false</enableAssertions-->
                 <argLine>-Xmx3000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
-                <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
-                <shutdown>kill</shutdown>
-                <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
                 <groups>org.apache.phoenix.end2end.ParallelStatsDisabledTest</groups>
-                <trimStackTrace>false</trimStackTrace>
               </configuration>
               <goals>
                 <goal>integration-test</goal>
@@ -290,16 +282,9 @@
             <execution>
               <id>HBaseManagedTimeTests</id>
               <configuration>
-                <encoding>UTF-8</encoding>
-                <forkCount>${numForkedIT}</forkCount>
-                <runOrder>alphabetical</runOrder>
                 <reuseForks>true</reuseForks>
                 <argLine>-enableassertions -Xmx3000m -XX:MaxPermSize=256m -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
-                <redirectTestOutputToFile>${test.output.tofile}</redirectTestOutputToFile>
-                <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
                 <groups>org.apache.phoenix.end2end.HBaseManagedTimeTest</groups>
-                <shutdown>kill</shutdown>
-                <trimStackTrace>false</trimStackTrace>
               </configuration>
               <goals>
                 <goal>integration-test</goal>
@@ -429,6 +414,7 @@
             <!-- precommit? -->
             <exclude>**/patchprocess/**</exclude>
             <exclude>**/derby.log</exclude>
+            <exclude>**/target/*</exclude>
           </excludes>
         </configuration>
       </plugin>