You are viewing a plain text version of this content. The canonical (HTML) version is available in the original mailing list archive; the hyperlink was lost in the plain-text conversion.
Posted to commits@fluo.apache.org by ct...@apache.org on 2020/12/23 18:58:42 UTC

[fluo-uno] branch main updated: Improve handling of downloads (#264)

This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/fluo-uno.git


The following commit(s) were added to refs/heads/main by this push:
     new 0142b24  Improve handling of downloads (#264)
0142b24 is described below

commit 0142b2431df511a522d94cb2e9a41e045bf0d118
Author: Christopher Tubbs <ct...@apache.org>
AuthorDate: Wed Dec 23 13:58:35 2020 -0500

    Improve handling of downloads (#264)
    
    * Add color output in the checksum matching to make it easier to read
    * Bypass download step if file already exists with matching checksum
    * Remove old hashes, keeping only the latest in each release line
    * Use only SHA-512
    * Use ZK 3.6.2 by default
    * Support a default environment value for the DOWNLOADS directory
---
 bin/impl/util.sh | 30 +++++++++++++++++++++---------
 conf/checksums   | 44 ++++++++++----------------------------------
 conf/uno.conf    |  4 ++--
 3 files changed, 33 insertions(+), 45 deletions(-)

diff --git a/bin/impl/util.sh b/bin/impl/util.sh
index 5f0528e..508059c 100755
--- a/bin/impl/util.sh
+++ b/bin/impl/util.sh
@@ -15,6 +15,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# check if running in a color terminal
+function terminalSupportsColor() { local c; c=$(tput colors 2>/dev/null) || c=-1; [[ -t 1 ]] && [[ $c -ge 8 ]]; }
+terminalSupportsColor && doColor=1 || doColor=0
+function color() { local c; c=$1; shift; [[ $doColor -eq 1 ]] && echo -e "\\e[0;${c}m${*}\\e[0m" || echo "$@"; }
+function red() { color 31 "$@"; }
+function green() { color 32 "$@"; }
+function yellow() { color 33 "$@"; }
+
 function verify_exist_hash() {
   local tarball=$1 expected_hash actual_hash hash_cmd
   expected_hash=$(echo "${2// /}" | tr '[:upper:]' '[:lower:]')
@@ -30,14 +38,14 @@ function verify_exist_hash() {
     64) hash_cmd='shasum -a 256' ;;
     128) hash_cmd='shasum -a 512' ;;
     *)
-      print_to_console "Expected checksum ($expected_hash) of $tarball is not an MD5, SHA1, SHA256, or SHA512 sum"
+      print_to_console "Expected checksum ($(red "$expected_hash")) of $(yellow "$tarball") is not an MD5, SHA1, SHA256, or SHA512 sum"
       return 1
       ;;
   esac
   actual_hash=$($hash_cmd "$DOWNLOADS/$tarball" | awk '{print $1}')
 
   if [[ $actual_hash != "$expected_hash" ]]; then
-    print_to_console "The actual checksum ($actual_hash) of $tarball does not match the expected checksum ($expected_hash)"
+    print_to_console "The actual checksum ($(red "$actual_hash")) of $(yellow "$tarball") does not match the expected checksum ($(green "$expected_hash"))"
     return 1
   fi
 }
@@ -121,20 +129,24 @@ function print_to_console {
 
 function download_tarball() {
   local url_prefix=$1 tarball=$2 expected_hash=$3
+  verify_exist_hash "$tarball" "$expected_hash" &>/dev/null || \
   wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
   verify_exist_hash "$tarball" "$expected_hash" || return 1
-  echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
+  echo "$(yellow "$tarball") download matches expected checksum ($(green "$expected_hash"))"
 }
 
 function download_apache() {
   local url_prefix=$1 tarball=$2 expected_hash=$3
-  [[ -n "${apache_mirror:-}" ]] && wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
-  if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
-    echo "Downloading $tarball from Apache archive"
-    wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
-  fi
+  verify_exist_hash "$tarball" "$expected_hash" &>/dev/null || \
+  {
+    [[ -n "${apache_mirror:-}" ]] && wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
+    if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
+      echo "Downloading $tarball from Apache archive"
+      wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
+    fi
+  }
   verify_exist_hash "$tarball" "$expected_hash" || return 1
-  echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
+  echo "$(yellow "$tarball") download matches expected checksum ($(green "$expected_hash"))"
 }
 
 function print_cmd_usage() {
diff --git a/conf/checksums b/conf/checksums
index 1c0d644..e16a94f 100644
--- a/conf/checksums
+++ b/conf/checksums
@@ -1,41 +1,17 @@
 accumulo:2.0.0:1e2b822e0fd6ba5293b09203eb0c5cc230e9f111361634b4d5665b0ddd2b28f42d76699cb08aaeff9b3242efd5fe369bfc871a7dc361e935980889bcb7b4568f
-accumulo:1.10.0:9d3023c8724069282035ed6dcb047f737c1c53dc05f7b15da2cfd941f51d1d7720892496475430eb639f3a36c83f4eecc1942c0317c67d38dcf2061d06beb648
-accumulo:1.9.3:dcda2ca6188314d43984282cf6fb32f53f93eb98e9c01d2988937b5c790b946ab8ced1d3a57171caaa32684cabbd2eabceaa955342fcd077bd78f1cc8fb22496
-accumulo:1.9.2:3018d55284940ca22946389c546db3c0dcaca682d49aee8fc031e25321badb8e1e518d5c086906e03e8c53926db5bb79a97daecaee5d1aa5417d46ee2b8a2419
-accumulo:1.9.1:7239561833359edf8dcdbee250b27df13feef35bfbf91a6c45f252c6c7712c51d543c21f7621e27b901db20b5a61d11cc8770f10e6e503b6ca42d12ba3657487
-accumulo:1.9.0:f68a6145029a9ea843b0305c90a7f5f0334d8a8ceeea94734267ec36421fe7fe
-accumulo:1.8.1:eba3bfe823935ca7901ea7c2bd59c84a68b9381361699c7e260bbd9191f237f4
-accumulo:1.7.4:3776dddbc2a09f4a9d7a2ae4958e212e91eb5067a124a628330edbee4e32e754
-accumulo:1.7.3:294f2f1f3fbc164b68e80cecd5a6ce5c245df804fb35ae5e03ab1c86bc9480da
+accumulo:1.10.1:cd613d042f9509aa4f6887eb70af48267c680fe21b463a7967ba0bd7d6688610ecad33073f916e8969243d1f2111773ecd57c8e09efae683c31c09428d93cd03
 hadoop:3.3.0:9ac5a5a8d29de4d2edfb5e554c178b04863375c5644d6fea1f6464ab4a7e22a50a6c43253ea348edbd114fc534dcde5bdd2826007e24b2a6b0ce0d704c5b4f5b
 hadoop:3.2.1:d62709c3d7144fcaafc60e18d0fa03d7d477cc813e45526f3646030cd87dbf010aeccf3f4ce795b57b08d2884b3a55f91fe9d74ac144992d2dfe444a4bbf34ee
-hadoop:3.2.0:79676a7dadbc4740cb2ff4ae7af75f5b0a45b4748f217f4179ab64827b840eef58c63b9132260c5682cb28b6d12a27d4a4d09a15173aca158fb1fc3cdb0b1609
 hadoop:3.1.4:f7eff67e948e21e23d7069dc169c98fcce757f59473e1e9a0e27d69a23df88840a9776089eeae5a8e1e496753f811868fe65638b04e3388c197931cd44d86f63
-hadoop:3.1.3:c790711a61e9052a7d9c02d97b1d5acbe3d1cc2cd7045bb387791cc5321e97e27edf118d3b4b319cc1538c9493d8f4fbbef4dda5ef8996157438d9db9ba2cfdb
-hadoop:3.1.2:0e0ee817c89b3c4eb761eca7f16640742a83b0e99b6fda26c1bee2baabedad93aab86e252bf5f1e2381c6d464bc4003d10c7cc0f61b2062f4c59732ca24d1bd9
-hadoop:3.1.1:0821685c2f77710f189cf6a37309cd6ba35b63432bae19f7b3db05fdbdd1d375d8333e47461d82762d5f7f4133c6b33216689a6403d7dff3f8f41dcbe5477030
-hadoop:3.1.0:8c620d1c82cc04629b7ada90ba0691c734196295e0103d74569de1f29e914327c281c0c9f1e48881df3d567f6482c288bd493a16257c801c82247f5eb5d7b1e4
-hadoop:3.0.3:db96e2c0d0d5352d8984892dfac4e27c0e682d98a497b7e04ee97c3e2019277a
-hadoop:3.0.2:0d507aa71007b2685e292343c11c2cb90a92ea7625446b57d1fb47c5721e2f82
-hadoop:2.10.0:76592efe09cd9887adb9e058c43d28858b19bcbc829ea1de3d7f7d2e54e4b37f415984dcac5f401deb9c30e69e85b7f3ac29785ac6eb17cd15b7664c731bcd85
-hadoop:2.9.2:3d2023c46b1156c1b102461ad08cbc17c8cc53004eae95dab40a1f659839f28a
-hadoop:2.9.0:8d48666f29f9ade6ed2762b7a9edab177bad2c57396f43d0ffd6a269d54f6fe1
-hadoop:2.8.5:f9c726df693ce2daa4107886f603270d66e7257f77a92c9886502d6cd4a884a4
-hadoop:2.8.4:63007792ecaf566aa8b97779db22805461ff3542fd18f14a60b3bcca1c6831bdeb6c9bb6d59596914fc6cc92c2049ce183e29c41aa10a97f5193fd3284a47acb
-hadoop:2.8.3:e8bf9a53337b1dca3b152b0a5b5e277dc734e76520543e525c301a050bb27eae
-hadoop:2.7.7:d129d08a2c9dafec32855a376cbd2ab90c6a42790898cabbac6be4d29f9c2026
-hadoop:2.7.6:f2327ea93f4bc5a5d7150dee8e0ede196d3a77ff8526a7dd05a48a09aae25669
-hadoop:2.7.5:0bfc4d9b04be919be2fdf36f67fa3b4526cdbd406c512a7a1f5f1b715661f831
-hadoop:2.6.5:001ad18d4b6d0fe542b15ddadba2d092bc97df1c4d2d797381c8d12887691898
-spark:3.0.1:07FC6DF224F303EF53189E6082004400D51CD6C2244D1851D90EABBE58404A69FF980BFFE147A5259A34190E1E1C974C72D02470D10A783D2D87F43A8DA0850B
-spark:2.4.7:FF75E6DB3C4CA5CECE2A8A26AD7BD8A2C1C46B4F93E8F2922A31E6A8D98274BEDC66BEF3E469E862D249C5439355CCCA83687A20D536A8400F2C058BE553DF57
+hadoop:3.0.3:bef5006616777ac07ece37db067e34499f4862ed93fe2fac5f2968f619a4a0aa5a87e7d2875f55e8f9abe6c3d0e92401fa7cfecfacfc6acbbb425eb91ea5fe3c
+hadoop:2.10.1:2460e02cd1f80dfed7a8981bbc934c095c0a341435118bec781fd835ec2ebdc5543a03d92d24f2ddeebdfe1c2c460065ba1d394ed9a73cbb2020b40a8d8b5e07
+hadoop:2.9.2:bd6060b629c2e7c5d411ae3ba2330b79cdb2f1caa87d5ea69392c0f9cb64a83464ec8e23f2cc61331b9f49284a6bfdc1a3fbdeffe3402650e64938f5672094f5
+hadoop:2.8.5:4174e7a67b614b7d5e47a1a2420cbe9a57978908f8ad0405f1d177306fb36ed87c895810f70e3c6a6cbadc76afb9303f1c49cbca67237e18c799d30f87afa57c
+hadoop:2.7.7:17c8917211dd4c25f78bf60130a390f9e273b0149737094e45f4ae5c917b1174b97eb90818c5df068e607835120126281bcc07514f38bd7fd3cb8e9d3db1bdde
+hadoop:2.6.5:fd9d24155bb18475c25bc6a88284a696224d5ef38503c5c3f1d4c0b65ae1c552d90d2136ca0fa45294d62204eed3d1154785c12cc260cca42ea2fbf027b461b2
+spark:3.0.1:07fc6df224f303ef53189e6082004400d51cd6c2244d1851d90eabbe58404a69ff980bffe147a5259a34190e1e1c974c72d02470d10a783d2d87f43a8da0850b
+spark:2.4.7:ff75e6db3c4ca5cece2a8a26ad7bd8a2c1c46b4f93e8f2922a31e6a8d98274bedc66bef3e469e862d249c5439355ccca83687a20d536a8400f2c058be553df57
 spark:2.3.2:79d1de6870cf43ecc059d4423540124610ef00abc929c171be42548b505f115906e251e4604ed7d22b6a8cce012a845d90017319c2abb3d5d700369b1c335c6d
 zookeeper:3.6.2:caff5111bb6876b7124760bc006e6fa2523efa54b99321a3c9cd8192ea0d5596abc7d70a054b1aac9b20a411407dae7611c7aba870c23bff28eb1643ba499199
-zookeeper:3.6.1:1c5cb4d9886fae41bf244a446dd874b73c0fff7a5fc2dda4305041964420cde21e59b388dfd2551464a46bb6918d9d3c3c01ebe68fdbe782074ee360aa830c7d
 zookeeper:3.5.8:9f2c2293a60e02244dff64438704094b2edb167bc45aa82def49eb4a7a3084881057f5d81384926e23a1029f699e6f41bfee5e19fc1664d044ff9c56bcaa6b31
-zookeeper:3.5.7:b9baa1ecb3d4dc0ef648ce7c74da4c5267ee89534c7614b8f27d3b0bc52004dcfbb8cecec810ffb7c8c45053daf8a5e849ce60ba241280fa1e2ab1d3b4990494
-zookeeper:3.4.14:b14f7a0fece8bd34c7fffa46039e563ac5367607c612517aa7bd37306afbd1cd
-zookeeper:3.4.13:3481bd19945d80848f81d9dc2896a682ae8b62269b8164ffbae532e55aa4219961403e0208c8e72cf784605eae436d70ddae2b26e0deba0f1f84c74188c32c0a
-zookeeper:3.4.12:c686f9319050565b58e642149cb9e4c9cc8c7207aacc2cb70c5c0672849594b9
-zookeeper:3.4.11:f6bd68a1c8f7c13ea4c2c99f13082d0d71ac464ffaf3bf7a365879ab6ad10e84
-zookeeper:3.4.10:7f7f5414e044ac11fee2a1e0bc225469f51fb0cdf821e67df762a43098223f27
+zookeeper:3.4.14:b2e03d95f8cf18b97a46e2f53871cef5a5da9d5d80b97009375aed7fb35368c440ca944c7e8b64efabbc065f6fb98bb86239f7c1491f0490efc71876d5a7f424
diff --git a/conf/uno.conf b/conf/uno.conf
index 61340f1..812d8a1 100644
--- a/conf/uno.conf
+++ b/conf/uno.conf
@@ -4,7 +4,7 @@
 # Versions
 # --------
 export HADOOP_VERSION=${HADOOP_VERSION:-3.3.0}
-export ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.6.1}
+export ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.6.2}
 export ACCUMULO_VERSION=${ACCUMULO_VERSION:-2.0.0}
 export FLUO_VERSION=${FLUO_VERSION:-1.2.0}
 export FLUO_YARN_VERSION=${FLUO_YARN_VERSION:-1.0.0}
@@ -28,7 +28,7 @@ export UNO_HOST=${UNO_HOST:-localhost}
 # Download configuration
 # ----------------------
 # Directory where all depedency packages are downloaded
-export DOWNLOADS=$UNO_HOME/downloads
+export DOWNLOADS=${UNO_DOWNLOADS:-$UNO_HOME/downloads}
 # Tarball file names
 export ACCUMULO_TARBALL=accumulo-$ACCUMULO_VERSION-bin.tar.gz
 export HADOOP_TARBALL=hadoop-"$HADOOP_VERSION".tar.gz