Posted to commits@bigtop.apache.org by yw...@apache.org on 2018/02/03 04:13:42 UTC

[1/2] bigtop git commit: BIGTOP-2894 Bump hbase to 1.3.1

Repository: bigtop
Updated Branches:
  refs/heads/master 3f723b7d0 -> 943ea913d


BIGTOP-2894 Bump hbase to 1.3.1

There are a number of issues with the license check when building
HBase with Hadoop 2.8.x.

This patch bumps HBase to the latest release and adds a couple of patches.

Signed-off-by: Youngwoo Kim <yw...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/eafac8e4
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/eafac8e4
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/eafac8e4

Branch: refs/heads/master
Commit: eafac8e4aba13d69a5dcf16e7fd51670cb291744
Parents: 3f723b7
Author: Anton Chevychalov <ca...@arenadata.io>
Authored: Fri Sep 8 19:52:14 2017 +0300
Committer: Youngwoo Kim <yw...@apache.org>
Committed: Sat Feb 3 10:48:25 2018 +0900

----------------------------------------------------------------------
 ...1-Partial-backport-HBASE-16712-to-1.3.1.diff |  60 ++++
 ...patch2-0002-Backport-HBASE-17893-to-1.3.diff | 278 +++++++++++++++++++
 bigtop.bom                                      |   2 +-
 3 files changed, 339 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/eafac8e4/bigtop-packages/src/common/hbase/patch1-0001-Partial-backport-HBASE-16712-to-1.3.1.diff
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hbase/patch1-0001-Partial-backport-HBASE-16712-to-1.3.1.diff b/bigtop-packages/src/common/hbase/patch1-0001-Partial-backport-HBASE-16712-to-1.3.1.diff
new file mode 100644
index 0000000..ed79b35
--- /dev/null
+++ b/bigtop-packages/src/common/hbase/patch1-0001-Partial-backport-HBASE-16712-to-1.3.1.diff
@@ -0,0 +1,60 @@
+From 7e3b1d7f830b6e6abd5c4ee6a775f4063b648b60 Mon Sep 17 00:00:00 2001
+From: Anton Chevychalov <pu...@mnu.pp.ru>
+Date: Fri, 8 Sep 2017 19:09:23 +0300
+Subject: [PATCH 1/2] Partial backport HBASE-16712 to 1.3.1
+
+This partial backport should fix the net.jcip package license trouble
+when building HBase with Hadoop 2.8.x
+
+https://issues.apache.org/jira/browse/HBASE-17893?focusedCommentId=15963617
+https://issues.apache.org/jira/browse/HBASE-16712
+---
+ .../src/main/resources/META-INF/LICENSE.vm             |  4 +++-
+ .../src/main/resources/supplemental-models.xml         | 18 ++++++++++++++++++
+ 2 files changed, 21 insertions(+), 1 deletion(-)
+
+diff --git a/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm b/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm
+index f403c89..0a4a23e 100644
+--- a/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm
++++ b/hbase-resource-bundle/src/main/resources/META-INF/LICENSE.vm
+@@ -1561,7 +1561,9 @@ You can redistribute it and/or modify it under either the terms of the GPL
+ ## Whitelist of licenses that it's safe to not aggregate as above.
+ ## Note that this doesn't include ALv2 or the aforementioned aggregate
+ ## license mentions.
+-#set($non_aggregate_fine = [ 'Public Domain', 'New BSD license', 'BSD license', 'Mozilla Public License Version 2.0' ])
++##
++## See this FAQ link for justifications: https://www.apache.org/legal/resolved.html
++#set($non_aggregate_fine = [ 'Public Domain', 'New BSD license', 'BSD license', 'Mozilla Public License Version 2.0', 'Creative Commons Attribution License, Version 2.5', 'MPL 1.1'])
+ ## include LICENSE sections for anything not under ASL2.0
+ #foreach( ${dep} in ${projects} )
+ #if(${debug-print-included-work-info.equalsIgnoreCase("true")})
+diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+index d6237d0..d5495cb 100644
+--- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml
++++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+@@ -2047,4 +2047,22 @@ Copyright (c) 2007-2011 The JRuby project
+       </licenses>
+     </project>
+   </supplement>
++  <supplement>
++    <project>
++      <groupId>net.jcip</groupId>
++      <artifactId>jcip-annotations</artifactId>
++      <version>1.0</version>
++      <organization>
++        <name>Brian Goetz and Tim Peierls</name>
++        <url>http://www.jcip.net</url>
++      </organization>
++      <licenses>
++        <license>
++          <name>Creative Commons Attribution License, Version 2.5</name>
++          <url>http://creativecommons.org/licenses/by/2.5</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
+ </supplementalDataModels>
+-- 
+2.7.4
+
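
For context on the LICENSE.vm hunk above: HBase assembles its LICENSE file
from that Velocity template, and the license check trips over any dependency
whose declared license name is neither ALv2 nor on the $non_aggregate_fine
whitelist, which is what started failing against Hadoop 2.8.x's transitive
dependencies. A rough plain-Java rendering of that gate (hypothetical class;
the real check is Velocity logic inside LICENSE.vm):

    import java.util.Set;

    public class LicenseGate {
        // Mirrors the $non_aggregate_fine whitelist as extended by this patch.
        static final Set<String> NON_AGGREGATE_FINE = Set.of(
                "Public Domain", "New BSD license", "BSD license",
                "Mozilla Public License Version 2.0",
                "Creative Commons Attribution License, Version 2.5",
                "MPL 1.1");

        static void check(String dependency, String licenseName) {
            if (!"Apache License, Version 2.0".equals(licenseName)
                    && !NON_AGGREGATE_FINE.contains(licenseName)) {
                throw new IllegalStateException(
                        "Unhandled license '" + licenseName + "' for " + dependency);
            }
        }
    }

Whitelisting 'Creative Commons Attribution License, Version 2.5' is what lets
the jcip-annotations supplement in this same patch pass that gate.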

http://git-wip-us.apache.org/repos/asf/bigtop/blob/eafac8e4/bigtop-packages/src/common/hbase/patch2-0002-Backport-HBASE-17893-to-1.3.diff
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hbase/patch2-0002-Backport-HBASE-17893-to-1.3.diff b/bigtop-packages/src/common/hbase/patch2-0002-Backport-HBASE-17893-to-1.3.diff
new file mode 100644
index 0000000..8bfaec0
--- /dev/null
+++ b/bigtop-packages/src/common/hbase/patch2-0002-Backport-HBASE-17893-to-1.3.diff
@@ -0,0 +1,278 @@
+From 1a58e47067d64866917b8834eabc37e4c2887ebb Mon Sep 17 00:00:00 2001
+From: Anton Chevychalov <pu...@mnu.pp.ru>
+Date: Fri, 8 Sep 2017 19:20:48 +0300
+Subject: [PATCH 2/2] Backport HBASE-17893 to 1.3
+
+---
+ .../src/main/resources/supplemental-models.xml     | 222 +++++++++++++++++++++
+ 1 file changed, 222 insertions(+)
+
+diff --git a/hbase-resource-bundle/src/main/resources/supplemental-models.xml b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+index d5495cb..e8e6bbb 100644
+--- a/hbase-resource-bundle/src/main/resources/supplemental-models.xml
++++ b/hbase-resource-bundle/src/main/resources/supplemental-models.xml
+@@ -644,6 +644,21 @@ under the License.
+     </project>
+   </supplement>
+   <supplement>
++    <project> <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>org.apache.commons</groupId>
++      <artifactId>commons-csv</artifactId>
++      <version>1.0</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
++  <supplement>
+     <project>
+       <groupId>org.apache.commons</groupId>
+       <artifactId>commons-math</artifactId>
+@@ -714,6 +729,21 @@ under the License.
+     </project>
+   </supplement>
+   <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>org.apache.curator</groupId>
++      <artifactId>curator-test</artifactId>
++      <version>2.7.1</version>
++
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
+     <project>
+       <groupId>org.apache.directory.api</groupId>
+       <artifactId>api-asn1-api</artifactId>
+@@ -783,6 +813,21 @@ under the License.
+       </licenses>
+     </project>
+   </supplement>
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>org.apache.htrace</groupId>
++      <artifactId>htrace-core4</artifactId>
++      <version>4.0.1-incubating</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
+   <!-- Used by Hadoop 2.6 -->
+   <supplement>
+     <project>
+@@ -980,6 +1025,7 @@ under the License.
+       </licenses>
+     </project>
+   </supplement>
++
+ <!-- Ambiguous license names in server and not in client -->
+   <supplement>
+     <project>
+@@ -1082,6 +1128,143 @@ Copyright 2006 Envoi Solutions LLC
+       </licenses>
+     </project>
+   </supplement>
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>com.codahale.metrics</groupId>
++      <artifactId>metrics-core</artifactId>
++      <version>3.0.1</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>com.nimbusds</groupId>
++      <artifactId>nimbus-jose-jwt</artifactId>
++      <version>3.9</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>  <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>com.squareup.okhttp</groupId>
++      <artifactId>okhttp</artifactId>
++      <version>2.4.0</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>  <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>com.squareup.okio</groupId>
++      <artifactId>okio</artifactId>
++      <version>1.4.0</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>com.twitter</groupId>
++      <artifactId>hpack</artifactId>
++      <version>0.11.0</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>net.minidev</groupId>
++      <artifactId>json-smart</artifactId>
++      <version>1.1.1</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement> 
++
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>de.ruedigermoeller</groupId>
++      <artifactId>fst</artifactId>
++      <version>2.24</version>
++      <!-- versions 2.17+ are ASFv2 though pom says LGPL 2.1 until 2.45+ -->
++      <!-- https://github.com/RuedigerMoeller/fast-serialization/blob/master/LICENSE.md -->
++      <!-- https://github.com/RuedigerMoeller/fast-serialization/commit/526dd4#diff-600376-->
++
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
++  <supplement>
++    <project>   <!-- hadoop.profile=3.0 from hadoop-3.0.0-alpha1 -->
++      <groupId>org.objenesis</groupId>
++      <artifactId>objenesis</artifactId>
++      <version>2.1</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
++  <!-- xercesImpl is necessary when using -Dhadoop.profile=3.0 due to a bug in maven. (last tested with mvn 3.3.9)
++       See HBASE-16712 for more details.-->
++  <supplement>
++    <project>
++      <groupId>xerces</groupId>
++      <artifactId>xercesImpl</artifactId>
++      <version>2.9.1</version>
++      <licenses>
++        <license>
++          <name>Apache License, Version 2.0</name>
++          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
++
+ <!-- Permissive licenses that need text in LICENSE -->
+   <supplement>
+     <project>
+@@ -2065,4 +2248,43 @@ Copyright (c) 2007-2011 The JRuby project
+       </licenses>
+     </project>
+   </supplement>
++
++  <supplement>
++    <project>
++      <groupId>net.jcip</groupId>
++      <artifactId>jcip-annotations</artifactId>
++      <version>1.0</version>
++      <organization>
++        <name>Brian Goetz and Tim Peierls</name>
++        <url>http://www.jcip.net</url>
++      </organization>
++      <licenses>
++        <license>
++          <name>Creative Commons Attribution License, Version 2.5</name>
++          <url>http://creativecommons.org/licenses/by/2.5</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++  <supplement>
++    <project>
++      <groupId>com.google.re2j</groupId>
++      <artifactId>re2j</artifactId>
++      <version>1.0</version>
++
++      <organization>
++        <name>The Go Authors</name>
++        <url>https://github.com/google/re2j</url>
++      </organization>
++      <licenses>
++        <license>
++          <name>BSD license</name> <!-- the Go license is BSD 3-clause verbatim -->
++          <url>https://github.com/google/re2j/blob/master/LICENSE</url>
++          <distribution>repo</distribution>
++        </license>
++      </licenses>
++    </project>
++  </supplement>
++
+ </supplementalDataModels>
+-- 
+2.7.4
+
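
Patch 2 is pure metadata: each <supplement> entry fills in or overrides the
license block that Maven's remote-resources machinery would otherwise read
from the dependency's own POM, which for several of these hadoop-3.0-profile
artifacts is missing or ambiguously named. A toy Java model of that override
semantics (illustrative GAV; not the actual plugin code):

    import java.util.HashMap;
    import java.util.Map;

    public class SupplementModel {
        public static void main(String[] args) {
            // License names as read from the dependencies' own POMs
            // (null = missing/unusable, the case these supplements exist for).
            Map<String, String> fromPoms = new HashMap<>();
            fromPoms.put("net.minidev:json-smart:1.1.1", null);

            // The supplemental-models.xml entries, keyed by GAV.
            Map<String, String> supplements = Map.of(
                    "net.minidev:json-smart:1.1.1", "Apache License, Version 2.0");

            // A supplement wins wherever one is defined.
            fromPoms.replaceAll((gav, name) -> supplements.getOrDefault(gav, name));
            System.out.println(fromPoms);
            // -> {net.minidev:json-smart:1.1.1=Apache License, Version 2.0}
        }
    }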

http://git-wip-us.apache.org/repos/asf/bigtop/blob/eafac8e4/bigtop.bom
----------------------------------------------------------------------
diff --git a/bigtop.bom b/bigtop.bom
index e7144c7..1c129b1 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -165,7 +165,7 @@ bigtop {
     'hbase' {
       name    = 'hbase'
       relNotes = 'Apache HBase'
-      version { base = '1.1.12'; pkg = base; release = 1 }
+      version { base = '1.3.1'; pkg = base; release = 1 }
       tarball { destination = "${name}-${version.base}.tar.gz"
                 source      = "${name}-${version.base}-src.tar.gz" }
       url     { download_path = "/$name/${version.base}/"
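
For reference, the BOM fields above are interpolated into the artifact names
the build fetches. A plain-Java sketch of the expansion for the new version
(the real evaluation happens in Bigtop's Groovy BOM DSL):

    public class BomExpansion {
        public static void main(String[] args) {
            String name = "hbase", base = "1.3.1";
            System.out.println(name + "-" + base + ".tar.gz");     // destination: hbase-1.3.1.tar.gz
            System.out.println(name + "-" + base + "-src.tar.gz"); // source: hbase-1.3.1-src.tar.gz
            System.out.println("/" + name + "/" + base + "/");     // download_path: /hbase/1.3.1/
        }
    }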


[2/2] bigtop git commit: BIGTOP-2990: Upgrade Phoenix version to 4.13.1-HBase-1.3

Posted by yw...@apache.org.
BIGTOP-2990: Upgrade Phoenix version to 4.13.1-HBase-1.3


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/943ea913
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/943ea913
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/943ea913

Branch: refs/heads/master
Commit: 943ea913de5ab7b6cbc4772d12e4347cd914950f
Parents: eafac8e
Author: Youngwoo Kim <yw...@apache.org>
Authored: Sat Feb 3 12:53:09 2018 +0900
Committer: Youngwoo Kim <yw...@apache.org>
Committed: Sat Feb 3 12:53:09 2018 +0900

----------------------------------------------------------------------
 .../src/common/phoenix/patch1-PHOENIX-4423.diff | 433 +++++++++++++++++++
 bigtop.bom                                      |   4 +-
 2 files changed, 435 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/943ea913/bigtop-packages/src/common/phoenix/patch1-PHOENIX-4423.diff
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/phoenix/patch1-PHOENIX-4423.diff b/bigtop-packages/src/common/phoenix/patch1-PHOENIX-4423.diff
new file mode 100644
index 0000000..ca76713
--- /dev/null
+++ b/bigtop-packages/src/common/phoenix/patch1-PHOENIX-4423.diff
@@ -0,0 +1,433 @@
+diff --git a/phoenix-hive/pom.xml b/phoenix-hive/pom.xml
+index f75009e32..cb28c76b4 100644
+--- a/phoenix-hive/pom.xml
++++ b/phoenix-hive/pom.xml
+@@ -141,6 +141,33 @@
+       <version>${mockito-all.version}</version>
+       <scope>test</scope>
+     </dependency>
++    <dependency>
++      <groupId>org.apache.calcite.avatica</groupId>
++      <artifactId>avatica</artifactId>
++      <!-- Overriding the version of Avatica that PQS uses so that Hive will work -->
++      <version>1.8.0</version>
++      <scope>test</scope>
++      <!-- And removing a bunch of dependencies that haven't been shaded in this older
++           Avatica version which conflict with HDFS -->
++      <exclusions>
++        <exclusion>
++          <groupId>org.hsqldb</groupId>
++          <artifactId>hsqldb</artifactId>
++        </exclusion>
++        <exclusion>
++          <groupId>com.fasterxml.jackson.core</groupId>
++          <artifactId>jackson-databind</artifactId>
++        </exclusion>
++        <exclusion>
++          <groupId>com.fasterxml.jackson.core</groupId>
++          <artifactId>jackson-annotations</artifactId>
++        </exclusion>
++        <exclusion>
++          <groupId>com.fasterxml.jackson.core</groupId>
++          <artifactId>jackson-core</artifactId>
++        </exclusion>
++      </exclusions>
++    </dependency>
+   </dependencies>
+ 
+   <build>
+diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+index c705e2db7..3210409ce 100644
+--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
++++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/BaseHivePhoenixStoreIT.java
+@@ -84,7 +84,7 @@ public class BaseHivePhoenixStoreIT {
+         try {
+             qt = new HiveTestUtil(hiveOutputDir, hiveLogDir, clusterType, null);
+         } catch (Exception e) {
+-            LOG.error("Unexpected exception in setup", e);
++            LOG.error("Unexpected exception in setup: " + e.getMessage(), e);
+             fail("Unexpected exception in setup");
+         }
+ 
+@@ -143,14 +143,6 @@ public class BaseHivePhoenixStoreIT {
+ 
+     @AfterClass
+     public static void tearDownAfterClass() throws Exception {
+-        if (qt != null) {
+-            try {
+-                qt.shutdown();
+-            } catch (Exception e) {
+-                LOG.error("Unexpected exception in setup", e);
+-                fail("Unexpected exception in tearDown");
+-            }
+-        }
+         try {
+             conn.close();
+         } finally {
+@@ -164,5 +156,14 @@ public class BaseHivePhoenixStoreIT {
+                 }
+             }
+         }
++        // Shuts down the filesystem -- do this after stopping HBase.
++        if (qt != null) {
++          try {
++              qt.shutdown();
++          } catch (Exception e) {
++              LOG.error("Unexpected exception in tearDown", e);
++              fail("Unexpected exception in tearDown");
++          }
++      }
+     }
+ }
+diff --git a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+index b4c4e4624..295e8b491 100644
+--- a/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
++++ b/phoenix-hive/src/it/java/org/apache/phoenix/hive/HiveTestUtil.java
+@@ -23,6 +23,8 @@ import org.apache.commons.io.FileUtils;
+ import org.apache.commons.io.IOUtils;
+ import org.apache.commons.logging.Log;
+ import org.apache.commons.logging.LogFactory;
++import org.apache.hadoop.conf.Configuration;
++import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+ import org.apache.hadoop.fs.FileStatus;
+ import org.apache.hadoop.fs.FileSystem;
+ import org.apache.hadoop.fs.Path;
+@@ -36,8 +38,10 @@ import org.apache.hadoop.hive.common.io.SortPrintStream;
+ import org.apache.hadoop.hive.conf.HiveConf;
+ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+ import org.apache.hadoop.hive.metastore.api.Index;
++import org.apache.hadoop.hive.ql.QueryState;
+ import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+ import org.apache.hadoop.hive.ql.exec.Utilities;
++import org.apache.hadoop.hive.ql.exec.tez.TezSessionState;
+ import org.apache.hadoop.hive.ql.lockmgr.zookeeper.ZooKeeperHiveLockManager;
+ import org.apache.hadoop.hive.ql.metadata.Hive;
+ import org.apache.hadoop.hive.ql.metadata.Table;
+@@ -71,6 +75,10 @@ import java.io.OutputStream;
+ import java.io.OutputStreamWriter;
+ import java.io.PrintStream;
+ import java.io.StringWriter;
++import java.lang.reflect.Constructor;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
++import java.lang.reflect.Modifier;
+ import java.net.URL;
+ import java.util.ArrayList;
+ import java.util.Arrays;
+@@ -113,6 +121,7 @@ public class HiveTestUtil {
+     private ParseDriver pd;
+     protected Hive db;
+     protected HiveConf conf;
++    protected QueryState queryState;
+     private BaseSemanticAnalyzer sem;
+     protected final boolean overWrite;
+     private CliDriver cliDriver;
+@@ -120,6 +129,7 @@ public class HiveTestUtil {
+     private HadoopShims.MiniDFSShim dfs = null;
+     private String hadoopVer = null;
+     private HiveTestSetup setup = null;
++    private TezSessionState tezSessionState = null;
+     private boolean isSessionStateStarted = false;
+     private static final String javaVersion = getJavaVersion();
+ 
+@@ -224,7 +234,7 @@ public class HiveTestUtil {
+             // set fs.default.name to the uri of mini-dfs
+             String dfsUriString = WindowsPathUtil.getHdfsUriString(dfs.getFileSystem().getUri()
+                     .toString());
+-            conf.setVar(HiveConf.ConfVars.HADOOPFS, dfsUriString);
++            conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, dfsUriString);
+             // hive.metastore.warehouse.dir needs to be set relative to the mini-dfs
+             conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
+                     (new Path(dfsUriString,
+@@ -270,10 +280,18 @@ public class HiveTestUtil {
+             LOG.info("Setting hive-site: " + HiveConf.getHiveSiteLocation());
+         }
+         conf = new HiveConf();
++        queryState = createQueryState(conf);
++        // Make sure QueryState didn't make a copy of our HiveConf
++        conf = queryState.getConf();
++        // Make sure YARN doesn't abort startup because of a near-full disk.
++        conf.setIfUnset(
++                "yarn.nodemanager.disk-health-checker.max-disk-utilization-per-disk-percentage",
++                "99.0");
+         String tmpBaseDir = System.getProperty("test.tmp.dir");
+         if (tmpBaseDir == null || tmpBaseDir == "") {
+             tmpBaseDir = System.getProperty("java.io.tmpdir");
+         }
++        LOG.info("Writing metastore database to " + tmpBaseDir);
+         String metaStoreURL = "jdbc:derby:" + tmpBaseDir + File.separator + "metastore_dbtest;" +
+                 "create=true";
+         conf.set(ConfVars.METASTORECONNECTURLKEY.varname, metaStoreURL);
+@@ -307,11 +325,14 @@ public class HiveTestUtil {
+             String uriString = WindowsPathUtil.getHdfsUriString(fs.getUri().toString());
+             if (clusterType == MiniClusterType.tez) {
+                 conf.set("hive.execution.engine", "tez");
+-                mr = shims.getMiniTezCluster(conf, 1, uriString, 1);
++                mr = getMiniTezCluster(shims, conf, uriString);
+             } else {
+                 conf.set("hive.execution.engine", "mr");
+                 mr = shims.getMiniMrCluster(conf, 1, uriString, 1);
+-
++                conf.setLong("mapreduce.map.memory.mb", 512);
++                conf.set("mapreduce.map.java.opts", "-Xmx512m");
++                conf.setInt("mapreduce.reduce.memory.mb", 512);
++                conf.set("mapreduce.reduce.java.opts", "-Xmx512m");
+             }
+         }
+ 
+@@ -344,6 +365,60 @@ public class HiveTestUtil {
+         init();
+     }
+ 
++    private static HadoopShims.MiniMrShim getMiniTezCluster(HadoopShims shim, HiveConf conf, String uriString) {
++        // Hive <2.2
++        try {
++            Method m = shim.getClass().getMethod("getMiniTezCluster", Configuration.class, int.class, String.class, int.class);
++            // shims.getMiniTezCluster(conf, 1, uriString, 1);
++            return (HadoopShims.MiniMrShim) m.invoke(shim, conf, 1, uriString, 1);
++        } catch (Exception e) {
++            if (e instanceof RuntimeException) {
++                throw (RuntimeException) e;
++            }
++            // Pass
++        }
++        // Hive >=2.2
++        try {
++            Method m = shim.getClass().getMethod("getMiniTezCluster", Configuration.class, int.class, String.class, boolean.class);
++            // shims.getMiniTezCluster(conf, 1, uriString, false);
++            return (HadoopShims.MiniMrShim) m.invoke(shim, conf, 1, uriString, false);
++        } catch (Exception e) {
++            if (e instanceof RuntimeException) {
++                throw (RuntimeException) e;
++            }
++            throw new RuntimeException(e);
++        }
++    }
++
++    private static QueryState createQueryState(HiveConf conf) {
++        // Don't really care about caching the Class and Method objects as this is test code.
++        try {
++            Class<?> clz = Class.forName("org.apache.hadoop.hive.ql.QueryState");
++            // Hive <3.0 return new QueryState(conf);
++            try {
++                Constructor<?> cons = clz.getConstructor(HiveConf.class);
++                // But, this constructor also exists in Hive3 as private.
++                if (Modifier.isPublic(cons.getModifiers())) {
++                    return (QueryState) cons.newInstance(conf);
++                }
++                LOG.warn("Constructor was not public: " + cons);
++            } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
++                LOG.warn("Failed to invoke `new QueryState(HiveConf)` via reflection", e);
++            }
++
++            // Hive 3 return new QueryState.Builder().withHiveConf(conf).build();
++            clz = Class.forName("org.apache.hadoop.hive.ql.QueryState$Builder");
++            Object builder = clz.newInstance();
++            Method withHiveConfMethod = clz.getMethod("withHiveConf", HiveConf.class);
++            withHiveConfMethod.invoke(builder, conf);
++            Method buildMethod = clz.getMethod("build");
++            return (QueryState) buildMethod.invoke(builder);
++        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException | SecurityException | IllegalArgumentException | InvocationTargetException e) {
++            LOG.warn("Failed to invoke `new QueryState.Builder().withHiveConf(conf).build()` via reflection", e);
++            throw new RuntimeException(e);
++        }
++    }
++
+     public void shutdown() throws Exception {
+         cleanUp();
+         setup.tearDown();
+@@ -571,13 +646,15 @@ public class HiveTestUtil {
+     public void init() throws Exception {
+         testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
+         conf.setBoolVar(HiveConf.ConfVars.SUBMITLOCALTASKVIACHILD, false);
++        conf.setBoolean("datanucleus.schema.autoCreateTables", true);
+         String execEngine = conf.get("hive.execution.engine");
++        conf.setBoolean("hive.metastore.schema.verification", false);
+         conf.set("hive.execution.engine", "mr");
+         SessionState.start(conf);
+         conf.set("hive.execution.engine", execEngine);
+         db = Hive.get(conf);
+         pd = new ParseDriver();
+-        sem = new SemanticAnalyzer(conf);
++        sem = new SemanticAnalyzer(queryState);
+     }
+ 
+     public void init(String tname) throws Exception {
+@@ -598,7 +675,7 @@ public class HiveTestUtil {
+ 
+         HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
+                 "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
+-        Utilities.clearWorkMap();
++        Utilities.clearWorkMapForConf(conf);
+         CliSessionState ss = new CliSessionState(conf);
+         assert ss != null;
+         ss.in = System.in;
+@@ -627,6 +704,14 @@ public class HiveTestUtil {
+         ss.setIsSilent(true);
+         SessionState oldSs = SessionState.get();
+ 
++        if (oldSs != null && clusterType == MiniClusterType.tez) {
++          // Copy the tezSessionState from the old CliSessionState.
++          tezSessionState = oldSs.getTezSession();
++          oldSs.setTezSession(null);
++          ss.setTezSession(tezSessionState);
++          oldSs.close();
++        }
++
+         if (oldSs != null && clusterType == MiniClusterType.tez) {
+             oldSs.close();
+         }
+@@ -1019,7 +1104,7 @@ public class HiveTestUtil {
+ 
+     public void resetParser() throws SemanticException {
+         pd = new ParseDriver();
+-        sem = new SemanticAnalyzer(conf);
++        sem = new SemanticAnalyzer(queryState);
+     }
+ 
+     public TreeMap<String, String> getQMap() {
+diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+index 4e9f46522..0f8ee93ac 100644
+--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
++++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/PhoenixStorageHandler.java
+@@ -35,8 +35,8 @@ import org.apache.hadoop.hive.ql.metadata.InputEstimator;
+ import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+ import org.apache.hadoop.hive.ql.plan.TableDesc;
+ import org.apache.hadoop.hive.ql.session.SessionState;
++import org.apache.hadoop.hive.serde2.AbstractSerDe;
+ import org.apache.hadoop.hive.serde2.Deserializer;
+-import org.apache.hadoop.hive.serde2.SerDe;
+ import org.apache.hadoop.hive.shims.ShimLoader;
+ import org.apache.hadoop.mapred.InputFormat;
+ import org.apache.hadoop.mapred.JobConf;
+@@ -242,7 +242,7 @@ public class PhoenixStorageHandler extends DefaultStorageHandler implements
+     }
+ 
+     @Override
+-    public Class<? extends SerDe> getSerDeClass() {
++    public Class<? extends AbstractSerDe> getSerDeClass() {
+         return PhoenixSerDe.class;
+     }
+ 
+diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+index f0a5dd696..a8315e2a1 100644
+--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
++++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/mapreduce/PhoenixInputFormat.java
+@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.client.Scan;
+ import org.apache.hadoop.hbase.util.Bytes;
+ import org.apache.hadoop.hbase.util.RegionSizeCalculator;
+ import org.apache.hadoop.hive.conf.HiveConf;
+-import org.apache.hadoop.hive.ql.exec.Utilities;
++import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+ import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+ import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+ import org.apache.hadoop.hive.serde.serdeConstants;
+@@ -103,7 +103,7 @@ public class PhoenixInputFormat<T extends DBWritable> implements InputFormat<Wri
+             String filterExprSerialized = jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
+             if (filterExprSerialized != null) {
+                 ExprNodeGenericFuncDesc filterExpr =
+-                        Utilities.deserializeExpression(filterExprSerialized);
++                        SerializationUtilities.deserializeExpression(filterExprSerialized);
+                 PhoenixPredicateDecomposer predicateDecomposer =
+                         PhoenixPredicateDecomposer.create(Arrays.asList(jobConf.get(serdeConstants.LIST_COLUMNS).split(",")));
+                 predicateDecomposer.decomposePredicate(filterExpr);
+diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
+index 659983a71..4ba1d79d3 100644
+--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
++++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/ql/index/IndexPredicateAnalyzer.java
+@@ -344,18 +344,12 @@ public class IndexPredicateAnalyzer {
+             searchConditions, Object... nodeOutputs) throws SemanticException {
+ 
+         if (FunctionRegistry.isOpAnd(expr)) {
+-            assert (nodeOutputs.length == 2);
+-            ExprNodeDesc residual1 = (ExprNodeDesc) nodeOutputs[0];
+-            ExprNodeDesc residual2 = (ExprNodeDesc) nodeOutputs[1];
+-            if (residual1 == null) {
+-                return residual2;
+-            }
+-            if (residual2 == null) {
+-                return residual1;
+-            }
+             List<ExprNodeDesc> residuals = new ArrayList<ExprNodeDesc>();
+-            residuals.add(residual1);
+-            residuals.add(residual2);
++            for(Object obj : nodeOutputs) {
++                if(obj!=null) {
++                    residuals.add((ExprNodeDesc) obj);
++                }
++            }
+             return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, FunctionRegistry
+                     .getGenericUDFForAnd(), residuals);
+         }
+diff --git a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+index 19c26e55b..22f459858 100644
+--- a/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
++++ b/phoenix-hive/src/main/java/org/apache/phoenix/hive/util/PhoenixStorageHandlerUtil.java
+@@ -22,6 +22,8 @@ import com.google.common.collect.Maps;
+ import java.io.ByteArrayInputStream;
+ import java.io.IOException;
+ import java.lang.reflect.Array;
++import java.lang.reflect.InvocationTargetException;
++import java.lang.reflect.Method;
+ import java.math.BigDecimal;
+ import java.net.InetAddress;
+ import java.net.InetSocketAddress;
+@@ -35,8 +37,11 @@ import java.util.List;
+ import java.util.Map;
+ import java.util.Map.Entry;
+ import java.util.Properties;
++import java.util.concurrent.atomic.AtomicReference;
++
+ import javax.naming.NamingException;
+ import org.apache.commons.logging.Log;
++import org.apache.commons.logging.LogFactory;
+ import org.apache.hadoop.conf.Configuration;
+ import org.apache.hadoop.hbase.HRegionLocation;
+ import org.apache.hadoop.hbase.util.Strings;
+@@ -60,6 +65,9 @@ import org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil;
+  */
+ 
+ public class PhoenixStorageHandlerUtil {
++    private static final Log LOG = LogFactory.getLog(PhoenixStorageHandlerUtil.class);
++    private static final AtomicReference<Method> GET_BUCKET_METHOD_REF = new AtomicReference<>();
++    private static final AtomicReference<Method> GET_BUCKET_ID_METHOD_REF = new AtomicReference<>();
+ 
+     public static String getTargetTableName(Table table) {
+         Map<String, String> tableParameterMap = table.getParameters();
+@@ -268,7 +276,7 @@ public class PhoenixStorageHandlerUtil {
+     public static String getOptionsValue(Options options) {
+         StringBuilder content = new StringBuilder();
+ 
+-        int bucket = options.getBucket();
++        int bucket = getBucket(options);
+         String inspectorInfo = options.getInspector().getCategory() + ":" + options.getInspector()
+                 .getTypeName();
+         long maxTxnId = options.getMaximumTransactionId();
+@@ -285,4 +293,27 @@ public class PhoenixStorageHandlerUtil {
+ 
+         return content.toString();
+     }
++
++    private static int getBucket(Options options) {
++        Method getBucketMethod = GET_BUCKET_METHOD_REF.get();
++        try {
++            if (getBucketMethod == null) {
++                getBucketMethod = Options.class.getMethod("getBucket");
++                GET_BUCKET_METHOD_REF.set(getBucketMethod);
++            }
++            return (int) getBucketMethod.invoke(options);
++        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
++            LOG.trace("Failed to invoke Options.getBucket()", e);
++        }
++        Method getBucketIdMethod = GET_BUCKET_ID_METHOD_REF.get();
++        try {
++            if (getBucketIdMethod == null) {
++                getBucketIdMethod = Options.class.getMethod("getBucketId");
++                GET_BUCKET_ID_METHOD_REF.set(getBucketIdMethod);
++            }
++            return (int) getBucketIdMethod.invoke(options);
++        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException e) {
++            throw new RuntimeException("Failed to invoke Options.getBucketId()", e);
++        }
++    }
+ }
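
The recurring technique in this Phoenix patch (getMiniTezCluster,
createQueryState, and getBucket above) is the same reflection fallback: probe
for one method or constructor signature, and if the running Hive version does
not have it, try the alternative. A minimal self-contained sketch of the
pattern (candidate names assumed; the real getBucket shim also caches the
resolved Method in an AtomicReference):

    import java.lang.reflect.Method;

    public class ReflectiveShim {
        // Try the old accessor first, then the renamed one, as the
        // getBucket()/getBucketId() shim above does.
        static int bucketOf(Object options) {
            for (String candidate : new String[] {"getBucket", "getBucketId"}) {
                try {
                    Method m = options.getClass().getMethod(candidate);
                    return (int) m.invoke(options);
                } catch (ReflectiveOperationException e) {
                    // This version lacks the candidate; fall through to the next.
                }
            }
            throw new IllegalStateException(
                    "No bucket accessor found on " + options.getClass());
        }
    }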

http://git-wip-us.apache.org/repos/asf/bigtop/blob/943ea913/bigtop.bom
----------------------------------------------------------------------
diff --git a/bigtop.bom b/bigtop.bom
index 1c129b1..1cf7f0a 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -347,8 +347,8 @@ bigtop {
        * to the base HBase version. Update as needed whenever changing the
        * HBase version in the BOM.
        */
-      phoenix.hbase ='HBase-1.1'
-      version { base = "4.9.0-${phoenix.hbase}"; pkg = '4.9.0'; release = 1 }
+      phoenix.hbase ='HBase-1.3'
+      version { base = "4.13.1-${phoenix.hbase}"; pkg = '4.13.1'; release = 1 }
       tarball { destination = "$name-${version.base}-src.tar.gz"
                 source      = "apache-$name-${version.base}-src.tar.gz" }
       url     { download_path = "/$name/apache-$name-${version.base}/src"
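
The comment in the hunk above encodes an invariant: the phoenix.hbase suffix
has to track the HBase line declared earlier in this same BOM (bumped to
1.3.1 in [1/2]). A hypothetical guard making that explicit, plus the source
tarball name the new entry expands to:

    public class PhoenixBomCheck {
        public static void main(String[] args) {
            String hbaseBase = "1.3.1";        // hbase entry, updated in [1/2]
            String phoenixHbase = "HBase-1.3"; // phoenix entry above
            String phoenixBase = "4.13.1-" + phoenixHbase;
            if (!hbaseBase.startsWith(phoenixHbase.replace("HBase-", ""))) {
                throw new IllegalStateException(
                        "phoenix.hbase is out of sync with the HBase BOM entry");
            }
            // source = "apache-$name-${version.base}-src.tar.gz"
            System.out.println("apache-phoenix-" + phoenixBase + "-src.tar.gz");
            // -> apache-phoenix-4.13.1-HBase-1.3-src.tar.gz
        }
    }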