You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@phoenix.apache.org by st...@apache.org on 2022/08/31 10:51:42 UTC
[phoenix-omid] branch master updated: OMID-231 Build and test Omid with Hadoop 3
This is an automated email from the ASF dual-hosted git repository.
stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-omid.git
The following commit(s) were added to refs/heads/master by this push:
new 41cf1935 OMID-231 Build and test Omid with Hadoop 3
41cf1935 is described below
commit 41cf193570e97e065ee159f297ed9b85d34d3880
Author: Istvan Toth <st...@apache.org>
AuthorDate: Mon Aug 29 10:49:15 2022 +0200
OMID-231 Build and test Omid with Hadoop 3
---
.gitignore | 3 +
.travis.yml | 2 +
dev-support/cache-apache-project-artifact.sh | 142 +++++++++++++++++++++++++++
dev-support/rebuild_hbase.sh | 77 +++++++++++++++
pom.xml | 39 +++++---
5 files changed, 249 insertions(+), 14 deletions(-)
diff --git a/.gitignore b/.gitignore
index 84db5847..2741304c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,9 @@ lib/
*.iws
*~
*.swp
+/dev-support/artifacts/**
+/dev-support/work/**
+
# Generated website files
generated-website/
diff --git a/.travis.yml b/.travis.yml
index b19f9a19..4d81466f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -38,5 +38,7 @@ script:
else
git checkout -b tmp-build-branch
&&
+ dev-support/rebuild_hbase.sh detect
+ &&
mvn clean test ;
fi
\ No newline at end of file
diff --git a/dev-support/cache-apache-project-artifact.sh b/dev-support/cache-apache-project-artifact.sh
new file mode 100755
index 00000000..21c1d869
--- /dev/null
+++ b/dev-support/cache-apache-project-artifact.sh
@@ -0,0 +1,142 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This was lovingly copied from Apache HBase
+
+set -e
+function usage {
+ echo "Usage: ${0} [options] /path/to/download/file.tar.gz download/fragment/eg/project/subdir/some-artifact-version.tar.gz"
+ echo ""
+ echo " --force for a redownload even if /path/to/download/file.tar.gz exists."
+ echo " --working-dir /path/to/use Path for writing tempfiles. must exist."
+ echo " defaults to making a directory via mktemp that we clean."
+ echo " --keys url://to/project/KEYS where to get KEYS. needed to check signature on download."
+ echo ""
+ exit 1
+}
+# if no args specified, show usage
+if [ $# -lt 2 ]; then
+ usage
+fi
+
+
+# Get arguments
+declare done_if_cached="true"
+declare working_dir
+declare cleanup="true"
+declare keys
+while [ $# -gt 0 ]
+do
+ case "$1" in
+ --force) shift; done_if_cached="false";;
+ --working-dir) shift; working_dir=$1; cleanup="false"; shift;;
+ --keys) shift; keys=$1; shift;;
+ --) shift; break;;
+ -*) usage ;;
+ *) break;; # terminate while loop
+ esac
+done
+
+# should still have required args
+if [ $# -lt 2 ]; then
+ usage
+fi
+
+target="$1"
+artifact="$2"
+
+if [ -f "${target}" ] && [ "true" = "${done_if_cached}" ]; then
+ echo "Reusing existing download of '${artifact}'."
+ exit 0
+fi
+
+if [ -z "${working_dir}" ]; then
+ if ! working_dir="$(mktemp -d -t hbase-download-apache-artifact)" ; then
+ echo "Failed to create temporary working directory. Please specify via --working-dir" >&2
+ exit 1
+ fi
+else
+ # absolutes please
+ working_dir="$(cd "$(dirname "${working_dir}")"; pwd)/$(basename "${working_dir}")"
+ if [ ! -d "${working_dir}" ]; then
+ echo "passed working directory '${working_dir}' must already exist." >&2
+ exit 1
+ fi
+fi
+
+function cleanup {
+ if [ -n "${keys}" ]; then
+ echo "Stopping gpg agent daemon"
+ gpgconf --homedir "${working_dir}/.gpg" --kill gpg-agent
+ echo "Stopped gpg agent daemon"
+ fi
+
+ if [ "true" = "${cleanup}" ]; then
+ echo "cleaning up temp space."
+ rm -rf "${working_dir}"
+ fi
+}
+trap cleanup EXIT SIGQUIT
+
+echo "New download of '${artifact}'"
+
+# N.B. this comes first so that if gpg falls over we skip the expensive download.
+if [ -n "${keys}" ]; then
+ if [ ! -d "${working_dir}/.gpg" ]; then
+ rm -rf "${working_dir}/.gpg"
+ mkdir -p "${working_dir}/.gpg"
+ chmod -R 700 "${working_dir}/.gpg"
+ fi
+ gpgconf --homedir "${working_dir}/.gpg" --create-socketdir || true
+ #shellcheck disable=SC2086
+ echo "socketdir is $(gpgconf --homedir ${working_dir}/.gpg --list-dirs socketdir)"
+ echo "installing project KEYS"
+ curl -L --fail -o "${working_dir}/KEYS" "${keys}"
+ if ! gpg --homedir "${working_dir}/.gpg" --import "${working_dir}/KEYS" ; then
+ echo "ERROR importing the keys via gpg failed. If the output above mentions this error:" >&2
+ echo " gpg: can't connect to the agent: File name too long" >&2
+ # we mean to give them the command to run, not to run it.
+ #shellcheck disable=SC2016
+ echo 'then you prolly need to create /var/run/user/$(id -u)' >&2
+ echo "see this thread on gnupg-users: https://s.apache.org/uI7x" >&2
+ exit 2
+ fi
+
+ echo "downloading signature"
+ curl -L --fail -o "${working_dir}/artifact.asc" "https://archive.apache.org/dist/${artifact}.asc"
+fi
+
+echo "downloading artifact"
+if ! curl --dump-header "${working_dir}/artifact_download_headers.txt" -L --fail -o "${working_dir}/artifact" "https://www.apache.org/dyn/closer.lua/${artifact}?action=download" ; then
+ echo "Artifact wasn't in mirror system. falling back to archive.a.o."
+ curl --dump-header "${working_dir}/artifact_fallback_headers.txt" -L --fail -o "${working_dir}/artifact" "http://archive.apache.org/dist/${artifact}"
+fi
+
+if [ -n "${keys}" ]; then
+ echo "verifying artifact signature"
+ gpg --homedir "${working_dir}/.gpg" --verify "${working_dir}/artifact.asc"
+ echo "signature good."
+fi
+
+echo "moving artifact into place at '${target}'"
+# ensure we're on the same filesystem
+mv "${working_dir}/artifact" "${target}.copying"
+# attempt atomic move
+mv "${target}.copying" "${target}"
+echo "all done!"
\ No newline at end of file
diff --git a/dev-support/rebuild_hbase.sh b/dev-support/rebuild_hbase.sh
new file mode 100755
index 00000000..7fd66a36
--- /dev/null
+++ b/dev-support/rebuild_hbase.sh
@@ -0,0 +1,77 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Rebuilds HBase with -Dhadoop.profile=3.0 locally, to work around PHOENIX-5993
+# Intended mainly for CI jobs, but can simplify manual rebuilds as well.
+
+
+DEV_SUPPORT="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+ARTIFACTS_DIR="$DEV_SUPPORT/artifacts"
+WORK_DIR="$DEV_SUPPORT/work"
+
+if [[ ! -z "$MAVEN_SETTINGS_FILE" ]]; then
+ SETTINGS=( "--settings" "$MAVEN_SETTINGS_FILE" )
+fi
+
+if [[ ! -z "$MAVEN_LOCAL_REPO" ]]; then
+ LOCALREPO="-Dmaven.repo.local=${MAVEN_LOCAL_REPO}"
+fi
+
+if [[ "$1" == "detect" ]]; then
+ set -e
+ cd "$DEV_SUPPORT/.."
+ HBASE_VERSION=$(mvn ${SETTINGS[@]} help:evaluate -Dexpression=hbase.version -q -DforceStdout $LOCALREPO)
+ echo "HBASE_VERSION=$HBASE_VERSION"
+ cd "$DEV_SUPPORT"
+ set +e
+else
+ HBASE_VERSION="$1"
+fi
+
+# The name of the Apache HBase source file
+HBASE_SOURCE_NAME="hbase-$HBASE_VERSION-src.tar.gz"
+# The relative path on the ASF mirrors for the HBase source file
+HBASE_SOURCE_MIRROR_NAME="hbase/$HBASE_VERSION/$HBASE_SOURCE_NAME"
+
+# Downloads the specified HBase version source, extracts it,
+# then rebuilds and installs the maven artifacts locally with -Dhadoop.profile=3.0
+
+if [ $# -ne 1 ]
+ then
+ echo "Supply the Hbase version as paramater i.e.: rebuild_hbase.sh 2.2.6 "
+fi
+
+mkdir "$ARTIFACTS_DIR"
+mkdir "$WORK_DIR"
+
+$DEV_SUPPORT/cache-apache-project-artifact.sh --keys https://downloads.apache.org/hbase/KEYS \
+ --working-dir "$WORK_DIR" "$ARTIFACTS_DIR/$HBASE_SOURCE_NAME" "$HBASE_SOURCE_MIRROR_NAME"
+
+if [[ ! -z "$MAVEN_SETTINGS_FILE" ]]; then
+ SETTINGS=( "--settings" "$MAVEN_SETTINGS_FILE" )
+fi
+
+STARTDIR=$PWD
+cd $ARTIFACTS_DIR
+tar xfz hbase-$HBASE_VERSION-src.tar.gz
+cd hbase-$HBASE_VERSION
+echo mvn ${SETTINGS[@]} clean install -Dhadoop.profile=3.0 -DskipTests -B $LOCALREPO
+mvn ${SETTINGS[@]} clean install -Dhadoop.profile=3.0 -DskipTests -B $LOCALREPO
+cd ${STARTDIR}
+
diff --git a/pom.xml b/pom.xml
index 1335ffcc..d26f00b1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -152,8 +152,8 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- 3rd-Party Library Versioning -->
- <hbase.version>2.4.10</hbase.version>
- <hadoop.version>2.10.0</hadoop.version>
+ <hbase.version>2.4.13</hbase.version>
+ <hadoop.version>3.1.4</hadoop.version>
<phoenix.thirdparty.version>2.0.0</phoenix.thirdparty.version>
<guice.version>3.0</guice.version>
<testng.version>6.10</testng.version>
@@ -381,6 +381,9 @@
<!-- Exclude config and scripts -->
<exclude>**/dev-utils/*</exclude>
+ <exclude>dev-support/artifacts/**</exclude>
+ <exclude>dev-support/work/**</exclude>
+
<!-- Exclude assembly -->
<exclude>**/maven/assembly/*</exclude>
@@ -397,6 +400,7 @@
<exclude>**/src/main/java/org/apache/omid/benchmarks/utils/ScrambledZipfianGenerator.java
</exclude>
+
<!-- Taken from https://github.com/apache/hbase -->
<exclude>**/src/main/java/org/apache/omid/committable/hbase/RegionSplitter.java</exclude>
@@ -435,6 +439,25 @@
</dependencies>
</plugin>
+ <plugin>
+ <groupId>org.apache.rat</groupId>
+ <artifactId>apache-rat-plugin</artifactId>
+ <configuration>
+ <excludes>
+ <exclude>**/*.yml</exclude>
+ <exclude>**/*.properties</exclude>
+ <exclude>**/hbase-site.xml</exclude>
+ <exclude>**/test-output/**</exclude>
+ <exclude>doc/site/site.xml</exclude>
+ <exclude>doc/images/ModuleDependencies.graffle</exclude>
+ <exclude>misc/findbugs-exclude.xml</exclude>
+ <exclude>misc/omid_checks.xml</exclude>
+ <exclude>dev-support/artifacts/**</exclude>
+ <exclude>dev-support/work/**</exclude>
+ </excludes>
+ </configuration>
+ </plugin>
+
</plugins>
<extensions>
@@ -488,18 +511,6 @@
<plugin>
<groupId>org.apache.rat</groupId>
<artifactId>apache-rat-plugin</artifactId>
- <configuration>
- <excludes>
- <exclude>**/*.yml</exclude>
- <exclude>**/*.properties</exclude>
- <exclude>**/hbase-site.xml</exclude>
- <exclude>**/test-output/**</exclude>
- <exclude>doc/site/site.xml</exclude>
- <exclude>doc/images/ModuleDependencies.graffle</exclude>
- <exclude>misc/findbugs-exclude.xml</exclude>
- <exclude>misc/omid_checks.xml</exclude>
- </excludes>
- </configuration>
<executions>
<execution>
<phase>package</phase>