You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@fluo.apache.org by ct...@apache.org on 2020/08/26 12:40:25 UTC
[fluo-uno] branch main updated: Do more with ShellCheck (#254)
This is an automated email from the ASF dual-hosted git repository.
ctubbsii pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/fluo-uno.git
The following commit(s) were added to refs/heads/main by this push:
new 76d64de Do more with ShellCheck (#254)
76d64de is described below
commit 76d64de72239c44fdfee9730fdb575bbaefad8d6
Author: Christopher Tubbs <ct...@apache.org>
AuthorDate: Wed Aug 26 08:40:16 2020 -0400
Do more with ShellCheck (#254)
* Fix numerous issues to make ShellCheck pass across `bin/*` and
`conf/uno.conf`
* Add GitHub Actions and CI script for automated ShellCheck testing
* Add missing checksum for latest Hadoops still supported by Accumulo
* Fix parameter passing for `--no-deps` flag
* Build SNAPSHOT version with correct version of Guava for Hadoop 3.1
and later
* Make some long if statements into more concise `&&` syntax for
readability
* Remove some unnecessary quoting
* Rely on return code for pgrep for many statements that were previously
relying on its output unnecessarily
* Fix zk dir pattern
* Fix return codes and make rm safer
* Fix ZK capitalization
* Overhaul some of the scripts:
Consolidate some of the scripts into a commands.sh file and use return
statements instead of exit statements, as appropriate, for more reliable
exit behavior for functions.
---
.github/workflows/shellcheck.yaml | 37 ++++
README.md | 22 +--
bin/impl/commands.sh | 343 ++++++++++++++++++++++++++++++++++++++
bin/impl/fetch.sh | 83 ++++-----
bin/impl/install.sh | 46 -----
bin/impl/install/accumulo.sh | 22 +--
bin/impl/install/fluo-yarn.sh | 12 +-
bin/impl/install/fluo.sh | 19 +--
bin/impl/install/hadoop.sh | 28 ++--
bin/impl/install/zookeeper.sh | 10 +-
bin/impl/kill.sh | 36 ----
bin/impl/load-env.sh | 59 ++-----
bin/impl/print-env.sh | 49 ------
bin/impl/run.sh | 49 ------
bin/impl/run/accumulo.sh | 9 +-
bin/impl/run/fluo-yarn.sh | 7 +-
bin/impl/run/fluo.sh | 8 +-
bin/impl/run/hadoop.sh | 4 +
bin/impl/run/zookeeper.sh | 8 +-
bin/impl/setup.sh | 50 ------
bin/impl/start.sh | 96 -----------
bin/impl/status.sh | 43 -----
bin/impl/stop.sh | 81 ---------
bin/impl/util.sh | 111 ++++++------
bin/impl/version.sh | 46 -----
bin/uno | 94 +++--------
conf/checksums | 5 +
conf/uno.conf | 14 +-
contrib/run-shellcheck | 12 ++
29 files changed, 623 insertions(+), 780 deletions(-)
diff --git a/.github/workflows/shellcheck.yaml b/.github/workflows/shellcheck.yaml
new file mode 100644
index 0000000..8343193
--- /dev/null
+++ b/.github/workflows/shellcheck.yaml
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: ShellCheck
+
+on:
+ push:
+ branches: [ '*' ]
+ pull_request:
+ branches: [ '*' ]
+
+jobs:
+ shellcheck:
+ name: ShellCheck
+ timeout-minutes: 3
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Running shellcheck on bin/** and conf/uno.conf
+ run: contrib/run-shellcheck
+
diff --git a/README.md b/README.md
index fe297dc..ab89908 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
![Uno][logo]
---
-[![Apache License][li]][ll]
+[![ShellCheck][ci_img]][ci_link] [![Apache License][li]][ll]
**Uno automates setting up [Apache Accumulo][accumulo] or [Apache Fluo][fluo] (and their dependencies) on a single machine.**
@@ -188,17 +188,19 @@ applications or follow the instructions starting at the [Configure a Fluo applic
section of the Fluo install instructions. These instructions will guide you through the process of
configuring, initializing, and starting your application.
-[fluo]: http://fluo.apache.org/
-[accumulo]: http://accumulo.apache.org/
-[zookeeper]: http://zookeeper.apache.org/
-[hadoop]: http://hadoop.apache.org/
-[mirrors]: http://www.apache.org/dyn/closer.cgi
-[Webindex]: https://github.com/apache/fluo-examples/tree/main/webindex
+[Accumulo Proxy]: https://github.com/apache/accumulo-proxy
+[Muchos]: https://github.com/apache/fluo-muchos
[Phrasecount]: https://github.com/apache/fluo-examples/tree/main/phrasecount
+[Webindex]: https://github.com/apache/fluo-examples/tree/main/webindex
+[accumulo]: https://accumulo.apache.org/
+[ci_img]: https://github.com/apache/fluo-uno/workflows/ShellCheck/badge.svg
+[ci_link]: https://github.com/apache/fluo-uno/actions
[configure]: https://github.com/apache/fluo/blob/main/docs/install.md#configure-a-fluo-application
-[li]: http://img.shields.io/badge/license-ASL-blue.svg
+[fluo]: https://fluo.apache.org/
+[hadoop]: https://hadoop.apache.org/
+[li]: https://img.shields.io/badge/license-Apache%202.0-blue.svg
[ll]: https://github.com/apache/fluo-uno/blob/main/LICENSE
[logo]: contrib/uno-logo.png
-[Muchos]: https://github.com/apache/fluo-muchos
+[mirrors]: https://www.apache.org/dyn/closer.cgi
[ssh-docs]: https://hadoop.apache.org/docs/r3.3.0/hadoop-project-dist/hadoop-common/SingleCluster.html#Setup_passphraseless_ssh
-[Accumulo Proxy]: https://github.com/apache/accumulo-proxy
+[zookeeper]: https://zookeeper.apache.org/
diff --git a/bin/impl/commands.sh b/bin/impl/commands.sh
new file mode 100755
index 0000000..1a9df24
--- /dev/null
+++ b/bin/impl/commands.sh
@@ -0,0 +1,343 @@
+#! /usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+: "${bin:?"'\$bin' should be set by 'uno' script"}"
+
+# shellcheck source=bin/impl/util.sh
+source "$bin"/impl/util.sh
+
+function uno_install_main() {
+ case "$1" in
+ accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ if install_component "$@"; then
+ echo "Installation of $1 complete."
+ else
+ echo "Installation of $1 failed!"
+ return 1
+ fi
+ ;;
+ *)
+ print_cmd_usage 'install' 'Installs'
+ return 1
+ ;;
+ esac
+}
+
+function uno_run_main() {
+ [[ -n $LOGS_DIR ]] && rm -f "$LOGS_DIR"/setup/*.{out,err}
+ echo "Running $1 (detailed logs in $LOGS_DIR/setup)..."
+ save_console_fd
+ case "$1" in
+ accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ if run_component "$@"; then
+ echo "Running $1 complete."
+ else
+ echo "Running $1 failed!"
+ return 1
+ fi
+ ;;
+ *)
+ print_cmd_usage 'run' 'Runs'
+ return 1
+ ;;
+ esac
+}
+
+function uno_setup_main() {
+ [[ -n $LOGS_DIR ]] && rm -f "$LOGS_DIR"/setup/*.{out,err}
+ echo "Setting up $1 (detailed logs in $LOGS_DIR/setup)..."
+ save_console_fd
+ case "$1" in
+ accumulo|hadoop|fluo|fluo-yarn|zookeeper)
+ if setup_component "$@"; then
+ echo "Setup of $1 complete."
+ else
+ echo "Setup of $1 failed!"
+ return 1
+ fi
+ ;;
+ *)
+ print_cmd_usage 'setup' 'Sets up'
+ return 1
+ ;;
+ esac
+}
+
+function uno_kill_main() {
+ pkill -f fluo\\.yarn
+ pkill -f MiniFluo
+ pkill -f accumulo\\.start
+ pkill -f hadoop\\.hdfs
+ pkill -f hadoop\\.yarn
+ pkill -f QuorumPeerMain
+ [[ -d $SPARK_HOME ]] && pkill -f org\\.apache\\.spark\\.deploy\\.history\\.HistoryServer
+ [[ -d $INFLUXDB_HOME ]] && pkill -f influxdb
+ [[ -d $GRAFANA_HOME ]] && pkill -f grafana-server
+ [[ -d $PROXY_HOME ]] && pkill -f accumulo\\.proxy\\.Proxy
+ return 0
+}
+
+function uno_env_main() {
+ if [[ -n $1 && $1 != '--vars' && $1 != '--paths' ]]; then
+ echo "Unrecognized env option '$1'"
+ return 1
+ fi
+ if [[ -z $1 || $1 == '--vars' ]]; then
+ echo "export HADOOP_HOME=\"$HADOOP_HOME\""
+ [[ $HADOOP_VERSION =~ ^2\..*$ ]] && echo "export HADOOP_PREFIX=\"$HADOOP_HOME\""
+ echo "export HADOOP_CONF_DIR=\"$HADOOP_CONF_DIR\""
+ echo "export ZOOKEEPER_HOME=\"$ZOOKEEPER_HOME\""
+ echo "export SPARK_HOME=\"$SPARK_HOME\""
+ echo "export ACCUMULO_HOME=\"$ACCUMULO_HOME\""
+ echo "export FLUO_HOME=\"$FLUO_HOME\""
+ echo "export FLUO_YARN_HOME=\"$FLUO_YARN_HOME\""
+ fi
+ if [[ -z $1 || $1 == '--paths' ]]; then
+ echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_HOME/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
+ [[ -d "$SPARK_HOME" ]] && echo -n ":$SPARK_HOME/bin"
+ [[ -d "$FLUO_HOME" ]] && echo -n ":$FLUO_HOME/bin"
+ [[ -d "$FLUO_YARN_HOME" ]] && echo -n ":$FLUO_YARN_HOME/bin"
+ [[ -d "$INFLUXDB_HOME" ]] && echo -n ":$INFLUXDB_HOME/bin"
+ [[ -d "$GRAFANA_HOME" ]] && echo -n ":$GRAFANA_HOME/bin"
+ echo '"'
+ fi
+}
+
+function uno_version_main() {
+ case "$1" in
+ hadoop) echo -n "$HADOOP_VERSION" ;;
+ zookeeper) echo -n "$ZOOKEEPER_VERSION" ;;
+ accumulo) echo -n "$ACCUMULO_VERSION" ;;
+ fluo) echo -n "$FLUO_VERSION" ;;
+ fluo-yarn) echo -n "$FLUO_YARN_VERSION" ;;
+ spark) echo -n "$SPARK_VERSION" ;;
+ influxdb) echo -n "$INFLUXDB_VERSION" ;;
+ grafana) echo -n "$GRAFANA_VERSION" ;;
+ *)
+ echo "You must specify a valid dependency (i.e. hadoop, zookeeper, accumulo, etc)"
+ return 1
+ ;;
+ esac
+}
+
+function uno_start_main() {
+ case "$1" in
+ accumulo)
+ check_dirs ACCUMULO_HOME || return 1
+
+ if [[ $2 != '--no-deps' ]]; then
+ check_dirs ZOOKEEPER_HOME HADOOP_HOME || return 1
+
+ tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$ZOOKEEPER_HOME"/bin/zkServer.sh start
+ else echo "ZooKeeper already running at: $tmp"
+ fi
+
+ tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$HADOOP_HOME"/sbin/start-dfs.sh
+ else echo "Hadoop DFS already running at: $tmp"
+ fi
+
+ tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$HADOOP_HOME"/sbin/start-yarn.sh
+ else echo "Hadoop Yarn already running at: $tmp"
+ fi
+ fi
+
+ tmp="$(pgrep -f accumulo\\.start | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
+ "$ACCUMULO_HOME"/bin/start-all.sh
+ else
+ "$ACCUMULO_HOME"/bin/accumulo-cluster start
+ fi
+ else echo "Accumulo already running at: $tmp"
+ fi
+ ;;
+ hadoop)
+ check_dirs HADOOP_HOME || return 1
+
+ tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$HADOOP_HOME"/sbin/start-dfs.sh
+ else echo "Hadoop DFS already running at: $tmp"
+ fi
+
+ tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$HADOOP_HOME"/sbin/start-yarn.sh
+ else echo "Hadoop Yarn already running at: $tmp"
+ fi
+ ;;
+ zookeeper)
+ check_dirs ZOOKEEPER_HOME || return 1
+
+ tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
+ if [[ -z $tmp ]]; then
+ "$ZOOKEEPER_HOME"/bin/zkServer.sh start
+ else echo "ZooKeeper already running at: $tmp"
+ fi
+ ;;
+ *)
+ cat <<EOF
+Usage: uno start <component> [--no-deps]
+
+Possible components:
+
+ accumulo Start Apache Accumulo plus dependencies: Hadoop, ZooKeeper
+ hadoop Start Apache Hadoop
+ zookeeper Start Apache ZooKeeper
+
+Options:
+ --no-deps Dependencies will start unless this option is specified. Only works for accumulo component.
+EOF
+ return 1
+ ;;
+ esac
+}
+
+function uno_stop_main() {
+ case "$1" in
+ accumulo)
+ check_dirs ACCUMULO_HOME || return 1
+
+ if pgrep -f accumulo\\.start >/dev/null; then
+ if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
+ "$ACCUMULO_HOME"/bin/stop-all.sh
+ else
+ "$ACCUMULO_HOME"/bin/accumulo-cluster stop
+ fi
+ fi
+
+ if [[ $2 != "--no-deps" ]]; then
+ check_dirs ZOOKEEPER_HOME HADOOP_HOME || return 1
+ pgrep -f hadoop\\.yarn >/dev/null && "$HADOOP_HOME"/sbin/stop-yarn.sh
+ pgrep -f hadoop\\.hdfs >/dev/null && "$HADOOP_HOME"/sbin/stop-dfs.sh
+ pgrep -f QuorumPeerMain >/dev/null && "$ZOOKEEPER_HOME"/bin/zkServer.sh stop
+ fi
+ ;;
+ hadoop)
+ check_dirs HADOOP_HOME || return 1
+ pgrep -f hadoop\\.yarn >/dev/null && "$HADOOP_HOME"/sbin/stop-yarn.sh
+ pgrep -f hadoop\\.hdfs >/dev/null && "$HADOOP_HOME"/sbin/stop-dfs.sh
+ ;;
+ zookeeper)
+ check_dirs ZOOKEEPER_HOME || return 1
+ pgrep -f QuorumPeerMain >/dev/null && "$ZOOKEEPER_HOME"/bin/zkServer.sh stop
+ ;;
+ *)
+ cat <<EOF
+Usage: uno stop <component> [--no-deps]
+
+Possible components:
+
+ accumulo Stop Apache Accumulo plus dependencies: Hadoop, ZooKeeper
+ hadoop Stop Apache Hadoop
+ zookeeper Stop Apache ZooKeeper
+
+Options:
+ --no-deps Dependencies will stop unless this option is specified. Only works for accumulo component.
+EOF
+ return 1
+ ;;
+ esac
+}
+
+function uno_status_main() {
+ # TODO this should be converted to using pgrep
+ # shellcheck disable=SC2009
+ atmp="$(ps -ef | grep accumulo\\.start | awk '{print $NF "(" $2 ")"}' | tr '\n' ' ')"
+ # shellcheck disable=SC2009
+ htmp="$(ps -ef | grep -e hadoop\\.hdfs -e hadoop\\.yarn | tr '.' ' ' | awk '{print $NF "(" $2 ")"}' | tr '\n' ' ')"
+ ztmp="$(pgrep -f QuorumPeerMain | awk '{print "zoo(" $1 ")"}' | tr '\n' ' ')"
+
+ if [[ -n $atmp || -n $ztmp || -n $htmp ]]; then
+ [[ -n $atmp ]] && echo "Accumulo processes running: $atmp"
+ [[ -n $ztmp ]] && echo "ZooKeeper processes running: $ztmp"
+ [[ -n $htmp ]] && echo "Hadoop processes running: $htmp"
+ else
+ echo "No components running."
+ fi
+}
+
+function uno_ashell_main() {
+ check_dirs ACCUMULO_HOME || return 1
+ "$ACCUMULO_HOME"/bin/accumulo shell -u "$ACCUMULO_USER" -p "$ACCUMULO_PASSWORD" "$@"
+}
+
+function uno_zk_main() {
+ check_dirs ZOOKEEPER_HOME || return 1
+ "$ZOOKEEPER_HOME"/bin/zkCli.sh "$@"
+}
+
+function uno_fetch_main() {
+ hash mvn 2>/dev/null || { echo >&2 "Maven must be installed & on PATH. Aborting."; return 1; }
+ hash wget 2>/dev/null || { echo >&2 "wget must be installed & on PATH. Aborting."; return 1; }
+ if [[ "$1" == "all" ]]; then
+ "$bin"/impl/fetch.sh fluo
+ else
+ "$bin"/impl/fetch.sh "$1" "$2"
+ fi
+}
+
+function uno_wipe_main() {
+ local yn
+ uno_kill_main
+ read -r -p "Are you sure you want to wipe '$INSTALL'? " yn
+ case "$yn" in
+ [yY]|[yY][eE][sS])
+ if [[ -d $INSTALL && $INSTALL != '/' ]]; then
+ echo "removing $INSTALL"
+ rm -rf "${INSTALL:?}"
+ fi
+ ;;
+ *)
+ exit
+ ;;
+ esac
+}
+
+function uno_help_main() {
+ cat <<EOF
+Usage: uno <command> (<argument>)
+
+Possible commands:
+
+ fetch <component> Fetches binary tarballs of component and its dependencies by either building or downloading
+ the tarball (as configured by uno.conf). Run 'uno fetch all' to fetch all binary tarballs.
+ install <component> Installs component and its dependencies (clearing any existing data)
+ run <component> Runs component and its dependencies (clearing any existing data)
+ setup <component> Installs and runs component and its dependencies (clearing any existing data)
+ start <component> Start ZooKeeper, Hadoop, Accumulo, if not running.
+ stop <component> Stop Accumulo, Hadoop, ZooKeeper, if running.
+ status Check if Accumulo, Hadoop, or ZooKeeper are running.
+ kill Kills all processes
+ ashell Runs the Accumulo shell
+ zk Connects to ZooKeeper CLI
+ env Prints out shell configuration for PATH and common environment variables.
+ Add '--paths' or '--vars' command to limit what is printed.
+ version <dep> Prints out configured version for dependency
+ wipe Kills all processes and clears install directory
+
+Possible components: accumulo, fluo, fluo-yarn, hadoop, zookeeper
+EOF
+}
+
+# commands.sh
diff --git a/bin/impl/fetch.sh b/bin/impl/fetch.sh
index 1cebec8..d373906 100755
--- a/bin/impl/fetch.sh
+++ b/bin/impl/fetch.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
function fetch_hadoop() {
@@ -26,32 +27,33 @@ function fetch_zookeeper() {
}
function fetch_accumulo() {
- if [[ $1 != "--no-deps" ]]; then
- fetch_hadoop
- fetch_zookeeper
- fi
+ [[ $1 != '--no-deps' ]] && fetch_hadoop && fetch_zookeeper
if [[ -n "$ACCUMULO_REPO" ]]; then
declare -a maven_args=(-DskipTests -DskipFormat)
- if [[ "${HADOOP_VERSION}" = 3.* ]]; then
+ if [[ $HADOOP_VERSION =~ 3\..* ]]; then
maven_args=("${maven_args[@]}" '-Dhadoop.profile=3')
+ if ! [[ $HADOOP_VERSION =~ 3\.0\..* ]]; then
+ # Hadoop 3.1 and later require a newer version of Guava
+ # than what Accumulo builds by default
+ maven_args=("${maven_args[@]}" '-Dguava.version=27.0-jre')
+ fi
fi
- rm -f "$DOWNLOADS/$ACCUMULO_TARBALL"
- pushd .
- cd "$ACCUMULO_REPO"
- mvn -V -e clean package "${maven_args[@]}"
+ rm -f "${DOWNLOADS:?}/${ACCUMULO_TARBALL:?}"
+ (cd "$ACCUMULO_REPO" && mvn -V -e clean package "${maven_args[@]}")
accumulo_built_tarball=$ACCUMULO_REPO/assemble/target/$ACCUMULO_TARBALL
if [[ ! -f "$accumulo_built_tarball" ]]; then
- echo
- echo "The following file does not exist :"
- echo " $accumulo_built_tarball"
- echo "after building from :"
- echo " ACCUMULO_REPO=$ACCUMULO_REPO"
- echo "ensure ACCUMULO_VERSION=$ACCUMULO_VERSION is correct."
- echo
+ cat <<EOF
+
+The following file does not exist :
+ $accumulo_built_tarball
+after building from :
+ ACCUMULO_REPO=$ACCUMULO_REPO
+ensure ACCUMULO_VERSION=$ACCUMULO_VERSION is correct.
+
+EOF
exit 1
fi
- popd
cp "$accumulo_built_tarball" "$DOWNLOADS"/
else
download_apache "accumulo/$ACCUMULO_VERSION" "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
@@ -59,14 +61,10 @@ function fetch_accumulo() {
}
function fetch_fluo() {
- if [[ $1 != "--no-deps" ]]; then
- fetch_accumulo
- fi
+ [[ $1 != '--no-deps' ]] && fetch_accumulo
if [[ -n "$FLUO_REPO" ]]; then
- rm -f "$DOWNLOADS/$FLUO_TARBALL"
- cd "$FLUO_REPO"
- mvn -V -e clean package -DskipTests -Dformatter.skip
-
+ rm -f "${DOWNLOADS:?}/${FLUO_TARBALL:?}"
+ (cd "$FLUO_REPO" && mvn -V -e clean package -DskipTests -Dformatter.skip)
fluo_built_tarball=$FLUO_REPO/modules/distribution/target/$FLUO_TARBALL
if [[ ! -f "$fluo_built_tarball" ]]; then
echo "The tarball $fluo_built_tarball does not exist after building from the FLUO_REPO=$FLUO_REPO"
@@ -83,7 +81,7 @@ function fetch_fluo() {
# Determine best apache mirror to use
apache_mirror=$(curl -sk https://apache.org/mirrors.cgi?as_json | grep preferred | cut -d \" -f 4)
-if [ -z "$apache_mirror" ]; then
+if [[ -z $apache_mirror ]]; then
echo "Failed querying apache.org for best download mirror!"
echo "Fetch can only verify existing downloads or build Accumulo/Fluo tarballs from a repo."
fi
@@ -96,14 +94,10 @@ fluo)
fetch_fluo "$2"
;;
fluo-yarn)
- if [[ $2 != "--no-deps" ]]; then
- fetch_fluo
- fi
- if [[ -n "$FLUO_YARN_REPO" ]]; then
- rm -f "$DOWNLOADS/$FLUO_YARN_TARBALL"
- cd "$FLUO_YARN_REPO"
- mvn -V -e clean package -DskipTests -Dformatter.skip
-
+ [[ $2 != '--no-deps' ]] && fetch_fluo
+ if [[ -n $FLUO_YARN_REPO ]]; then
+ rm -f "${DOWNLOADS:?}/${FLUO_YARN_TARBALL:?}"
+ (cd "$FLUO_YARN_REPO" && mvn -V -e clean package -DskipTests -Dformatter.skip)
built_tarball=$FLUO_YARN_REPO/target/$FLUO_YARN_TARBALL
if [[ ! -f "$built_tarball" ]]; then
echo "The tarball $built_tarball does not exist after building from the FLUO_YARN_REPO=$FLUO_YARN_REPO"
@@ -122,13 +116,20 @@ zookeeper)
fetch_zookeeper
;;
*)
- echo "Usage: uno fetch <component>"
- echo -e "\nPossible components:\n"
- echo " accumulo Downloads Accumulo, Hadoop & ZooKeeper. Builds Accumulo if repo set in uno.conf"
- echo " fluo Downloads Fluo, Accumulo, Hadoop & ZooKeeper. Builds Fluo or Accumulo if repo set in uno.conf"
- echo " hadoop Downloads Hadoop"
- echo " zookeeper Downloads ZooKeeper"
- echo "Options:"
- echo " --no-deps Dependencies will be fetched unless this option is specified. Only works for fluo & accumulo components."
+ cat <<EOF
+Usage: uno fetch <component>
+
+Possible components:
+
+ accumulo Downloads Accumulo, Hadoop & ZooKeeper. Builds Accumulo if repo set in uno.conf
+ fluo Downloads Fluo, Accumulo, Hadoop & ZooKeeper. Builds Fluo or Accumulo if repo set in uno.conf
+ hadoop Downloads Hadoop
+ zookeeper Downloads ZooKeeper
+
+Options:
+ --no-deps Dependencies will be fetched unless this option is specified. Only works for fluo & accumulo components.
+EOF
exit 1
esac
+
+# fetch.sh
diff --git a/bin/impl/install.sh b/bin/impl/install.sh
deleted file mode 100755
index 29af8b6..0000000
--- a/bin/impl/install.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source "$UNO_HOME"/bin/impl/util.sh
-
-case "$1" in
- accumulo|fluo|fluo-yarn)
- install_component "$1" "$2"
- ;;
- hadoop|zookeeper)
- install_component "$1"
- ;;
- *)
- echo "Usage: uno install <component> [--no-deps]"
- echo -e "\nPossible components:\n"
- echo " accumulo Installs Apache Accumulo and its dependencies (Hadoop & ZooKeeper)"
- echo " fluo Installs Apache Fluo and its dependencies (Accumulo, Hadoop, & ZooKeeper)"
- echo " fluo-yarn Installs Apache Fluo YARN"
- echo " hadoop Installs Apache Hadoop"
- echo -e " zookeeper Installs Apache ZooKeeper\n"
- echo "Options:"
- echo " --no-deps Dependencies will be setup unless this option is specified. Only works for fluo & accumulo components."
- exit 1
- ;;
-esac
-
-if [[ "$?" == 0 ]]; then
- echo "Install complete."
-else
- echo "Install failed!"
- false
-fi
diff --git a/bin/impl/install/accumulo.sh b/bin/impl/install/accumulo.sh
index 9c98268..24aa38a 100755
--- a/bin/impl/install/accumulo.sh
+++ b/bin/impl/install/accumulo.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f accumulo.start
@@ -23,19 +24,13 @@ pkill -f accumulo.start
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ -z "$ACCUMULO_REPO" ]]; then
- verify_exist_hash "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
-fi
-
-if [[ $1 != "--no-deps" ]]; then
- install_component Hadoop
- install_component ZooKeeper
-fi
+[[ -z $ACCUMULO_REPO ]] && verify_exist_hash "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
+[[ $1 != '--no-deps' ]] && install_component hadoop && install_component zookeeper
print_to_console "Installing Apache Accumulo $ACCUMULO_VERSION at $ACCUMULO_HOME"
-rm -rf "$INSTALL"/accumulo-*
-rm -f "$ACCUMULO_LOG_DIR"/*
+rm -rf "${INSTALL:?}"/accumulo-*
+rm -f "${ACCUMULO_LOG_DIR:?}"/*
mkdir -p "$ACCUMULO_LOG_DIR"
tar xzf "$DOWNLOADS/$ACCUMULO_TARBALL" -C "$INSTALL"
@@ -62,6 +57,8 @@ else
$SED "s#auth[.]principal=#auth.principal=$ACCUMULO_USER#" "$conf"/accumulo-client.properties
$SED "s#auth[.]token=#auth.token=$ACCUMULO_PASSWORD#" "$conf"/accumulo-client.properties
if [[ $ACCUMULO_VERSION =~ ^2\.0\.0.*$ ]]; then
+ # Ignore false positive; we actually want the literal '${ZOOKEEPER_HOME}' and not its current value
+ # shellcheck disable=SC2016
$SED 's#:[$][{]ZOOKEEPER_HOME[}]/[*]:#:${ZOOKEEPER_HOME}/*:${ZOOKEEPER_HOME}/lib/*:#' "$conf"/accumulo-env.sh
fi
fi
@@ -87,10 +84,13 @@ $SED "s#ACCUMULO_INSTANCE#$ACCUMULO_INSTANCE#" "$it_props"
$SED "s#HADOOP_CONF_DIR#$HADOOP_CONF_DIR#" "$it_props"
$SED "s#ACCUMULO_HOME#$ACCUMULO_HOME#" "$it_props"
-if [[ "$ACCUMULO_USE_NATIVE_MAP" == "true" ]]; then
+if [[ $ACCUMULO_USE_NATIVE_MAP == 'true' ]]; then
if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
"$ACCUMULO_HOME"/bin/build_native_library.sh
else
"$ACCUMULO_HOME"/bin/accumulo-util build-native
fi
fi
+
+true
+# accumulo.sh
diff --git a/bin/impl/install/fluo-yarn.sh b/bin/impl/install/fluo-yarn.sh
index bbf7a71..3506778 100755
--- a/bin/impl/install/fluo-yarn.sh
+++ b/bin/impl/install/fluo-yarn.sh
@@ -15,17 +15,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
# stop if any command fails
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ -z "$FLUO_YARN_REPO" ]]; then
- verify_exist_hash "$FLUO_YARN_TARBALL" "$FLUO_YARN_HASH"
-fi
+[[ -z $FLUO_YARN_REPO ]] && verify_exist_hash "$FLUO_YARN_TARBALL" "$FLUO_YARN_HASH"
-if [[ -f "$DOWNLOADS/$FLUO_YARN_TARBALL" ]]; then
+if [[ -f $DOWNLOADS/$FLUO_YARN_TARBALL ]]; then
print_to_console "WARNING: Apache Fluo YARN launcher tarball '$FLUO_YARN_TARBALL' was not found in $DOWNLOADS."
print_to_console "Apache Fluo YARN launcher will not be set up!"
fi
@@ -39,7 +38,7 @@ pkill -f twill.launcher
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-rm -rf "$INSTALL"/fluo-yarn*
+rm -rf "${INSTALL:?}"/fluo-yarn*
tar xzf "$DOWNLOADS/$FLUO_YARN_TARBALL" -C "$INSTALL"/
@@ -56,3 +55,6 @@ $SED "s#ZOOKEEPER_HOME=.*#ZOOKEEPER_HOME=$ZOOKEEPER_HOME#g" "$FLUO_YARN_HOME"/co
"$FLUO_YARN_HOME"/lib/fetch.sh
stty sane
+
+true
+# fluo-yarn.sh
diff --git a/bin/impl/install/fluo.sh b/bin/impl/install/fluo.sh
index c577958..ab825e8 100755
--- a/bin/impl/install/fluo.sh
+++ b/bin/impl/install/fluo.sh
@@ -15,21 +15,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
# stop if any command fails
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ -z "$FLUO_REPO" ]]; then
- verify_exist_hash "$FLUO_TARBALL" "$FLUO_HASH"
-fi
-
-if [[ $1 != "--no-deps" ]]; then
- install_component Accumulo
-fi
+[[ -z $FLUO_REPO ]] && verify_exist_hash "$FLUO_TARBALL" "$FLUO_HASH"
+[[ $1 != '--no-deps' ]] && install_component accumulo
-if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
+if [[ -f $DOWNLOADS/$FLUO_TARBALL ]]; then
print_to_console "Setting up Apache Fluo at $FLUO_HOME"
# Don't stop if pkills fail
set +e
@@ -40,7 +36,7 @@ if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
- rm -rf "$INSTALL"/fluo-[0-9]*
+ rm -rf "${INSTALL:?}"/fluo-[0-9]*
tar xzf "$DOWNLOADS/$FLUO_TARBALL" -C "$INSTALL"/
@@ -60,7 +56,7 @@ if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
$SED "s/.*fluo.worker.num.threads=.*/fluo.worker.num.threads=$FLUO_WORKER_THREADS/g" "$app_props"
fi
- if [[ -f "$fluo_props" ]]; then
+ if [[ -f $fluo_props ]]; then
# This file was deprecated in Fluo 1.2.0 and removed in Fluo 2.0. Only update it if it exists.
$SED "s#fluo.admin.hdfs.root=.*#fluo.admin.hdfs.root=hdfs://$UNO_HOST:8020#g" "$fluo_props"
$SED "s/fluo.client.accumulo.instance=/fluo.client.accumulo.instance=$ACCUMULO_INSTANCE/g" "$fluo_props"
@@ -82,3 +78,6 @@ else
print_to_console "WARNING: Apache Fluo tarball '$FLUO_TARBALL' was not found in $DOWNLOADS."
print_to_console "Apache Fluo will not be set up!"
fi
+
+true
+# fluo.sh
diff --git a/bin/impl/install/hadoop.sh b/bin/impl/install/hadoop.sh
index 985bdd7..a6d7f9d 100755
--- a/bin/impl/install/hadoop.sh
+++ b/bin/impl/install/hadoop.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f hadoop.hdfs
@@ -28,20 +29,16 @@ verify_exist_hash "$HADOOP_TARBALL" "$HADOOP_HASH"
print_to_console "Installing Apache Hadoop $HADOOP_VERSION at $HADOOP_HOME"
-rm -rf "$INSTALL"/hadoop-*
-rm -rf "$HADOOP_LOG_DIR"/*
-rm -rf "$DATA_DIR"/hadoop
+rm -rf "${INSTALL:?}"/hadoop-*
+rm -rf "${HADOOP_LOG_DIR:?}"/* # use :? to avoid removing /* if var is empty string
+rm -rf "${DATA_DIR:?}"/hadoop
mkdir -p "$HADOOP_LOG_DIR"
tar xzf "$DOWNLOADS/$HADOOP_TARBALL" -C "$INSTALL"
hadoop_conf="$HADOOP_HOME"/etc/hadoop
cp "$UNO_HOME"/conf/hadoop/common/* "$hadoop_conf/"
-if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
- cp "$UNO_HOME"/conf/hadoop/2/* "$hadoop_conf/"
-else
- cp "$UNO_HOME"/conf/hadoop/3/* "$hadoop_conf/"
-fi
+cp "$UNO_HOME/conf/hadoop/${HADOOP_VERSION:0:1}"/* "$hadoop_conf/"
if [[ $HADOOP_VERSION =~ ^3\.[012]\..*$ ]]; then
# need the following for Java 11, because Hadoop doesn't include it until 3.3
@@ -59,9 +56,12 @@ $SED "s#HADOOP_LOG_DIR#$HADOOP_LOG_DIR#g" "$hadoop_conf/yarn-site.xml"
$SED "s#YARN_NM_MEM_MB#$YARN_NM_MEM_MB#g" "$hadoop_conf/yarn-site.xml"
$SED "s#YARN_NM_CPU_VCORES#$YARN_NM_CPU_VCORES#g" "$hadoop_conf/yarn-site.xml"
-echo "export JAVA_HOME=$JAVA_HOME" >> "$hadoop_conf/hadoop-env.sh"
-echo "export HADOOP_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/hadoop-env.sh"
-echo "export HADOOP_MAPRED_HOME=$HADOOP_HOME" >> "$hadoop_conf/hadoop-env.sh"
-if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
- echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/yarn-env.sh"
-fi
+{
+ echo "export JAVA_HOME=\"$JAVA_HOME\""
+ echo "export HADOOP_LOG_DIR=\"$HADOOP_LOG_DIR\""
+ echo "export HADOOP_MAPRED_HOME=\"$HADOOP_HOME\""
+} >> "$hadoop_conf/hadoop-env.sh"
+[[ $HADOOP_VERSION =~ ^2\..*$ ]] && echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/yarn-env.sh"
+
+true
+# hadoop.sh
diff --git a/bin/impl/install/zookeeper.sh b/bin/impl/install/zookeeper.sh
index 9c5e285..1ec9ad1 100755
--- a/bin/impl/install/zookeeper.sh
+++ b/bin/impl/install/zookeeper.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f QuorumPeerMain
@@ -27,12 +28,15 @@ verify_exist_hash "$ZOOKEEPER_TARBALL" "$ZOOKEEPER_HASH"
print_to_console "Installing Apache ZooKeeper $ZOOKEEPER_VERSION at $ZOOKEEPER_HOME"
-rm -rf "$INSTALL"/zookeeper-*
-rm -f "$ZOO_LOG_DIR"/*
-rm -rf "$DATA_DIR"/zookeeper
+rm -rf "${INSTALL:?}"/*zookeeper-*
+rm -f "${ZOO_LOG_DIR:?}"/*
+rm -rf "${DATA_DIR:?}"/zookeeper
mkdir -p "$ZOO_LOG_DIR"
tar xzf "$DOWNLOADS/$ZOOKEEPER_TARBALL" -C "$INSTALL"
cp "$UNO_HOME"/conf/zookeeper/* "$ZOOKEEPER_HOME"/conf/
$SED "s#DATA_DIR#$DATA_DIR#g" "$ZOOKEEPER_HOME"/conf/zoo.cfg
+
+true
+# zookeeper.sh
diff --git a/bin/impl/kill.sh b/bin/impl/kill.sh
deleted file mode 100755
index a0ebdd3..0000000
--- a/bin/impl/kill.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-pkill -f fluo\\.yarn
-pkill -f MiniFluo
-pkill -f accumulo\\.start
-pkill -f hadoop\\.hdfs
-pkill -f hadoop\\.yarn
-pkill -f QuorumPeerMain
-
-if [[ -d "$SPARK_HOME" ]]; then
- pkill -f org\\.apache\\.spark\\.deploy\\.history\\.HistoryServer
-fi
-if [[ -d "$INFLUXDB_HOME" ]]; then
- pkill -f influxdb
-fi
-if [[ -d "$GRAFANA_HOME" ]]; then
- pkill -f grafana-server
-fi
-if [[ -d "$PROXY_HOME" ]]; then
- pkill -f accumulo\\.proxy\\.Proxy
-fi
diff --git a/bin/impl/load-env.sh b/bin/impl/load-env.sh
index 45c5955..a750727 100755
--- a/bin/impl/load-env.sh
+++ b/bin/impl/load-env.sh
@@ -15,22 +15,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Start: Resolve Script Directory
-SOURCE="${BASH_SOURCE[0]}"
-while [[ -h "$SOURCE" ]]; do # resolve $SOURCE until the file is no longer a symlink
- impl="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
- SOURCE="$(readlink "$SOURCE")"
- [[ $SOURCE != /* ]] && SOURCE="$impl/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
-done
-impl="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
-bin="$( cd -P "$( dirname "$impl" )" && pwd )"
-# Stop: Resolve Script Directory
+: "${bin:?"'\$bin' should be set by 'uno' script"}"
# Determine UNO_HOME - Use env variable set by user. If none set, calculate using bin dir
UNO_HOME="${UNO_HOME:-$( cd -P "${bin}"/.. && pwd )}"
export UNO_HOME
-if [[ -z "$UNO_HOME" || ! -d "$UNO_HOME" ]]
-then
+if [[ -z $UNO_HOME || ! -d $UNO_HOME ]]; then
echo "UNO_HOME=$UNO_HOME is not a valid directory. Please make sure it exists"
exit 1
fi
@@ -44,8 +34,10 @@ FH=$FLUO_HOME
# Load env configuration
if [[ -f "$UNO_HOME/conf/uno-local.conf" ]]; then
+ # shellcheck source=conf/uno.conf
source "$UNO_HOME"/conf/uno-local.conf
elif [[ -f "$UNO_HOME/conf/uno.conf" ]]; then
+ # shellcheck source=conf/uno.conf
source "$UNO_HOME"/conf/uno.conf
else
echo "ERROR: Configuration file $UNO_HOME/conf/uno.conf does not exist" 1>&2
@@ -53,36 +45,19 @@ else
fi
function env_error() {
+ echo "$1 in your shell env '$2' needs to match your uno.conf '$3'"
echo 'Make your shell env match uno.conf by running: source <(./bin/uno env)'
exit 1
}
# Confirm that hadoop, accumulo, and zookeeper env variables are not set
if [[ ! "version env" =~ $1 ]]; then
- if [[ -n "$HH" && "$HH" != "$HADOOP_HOME" ]]; then
- echo "HADOOP_HOME in your shell env '$HH' needs to match your uno uno.conf '$HADOOP_HOME'"
- env_error
- fi
- if [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]]; then
- echo "HADOOP_CONF_DIR in your shell env '$HC' needs to match your uno uno.conf '$HADOOP_CONF_DIR'"
- env_error
- fi
- if [[ -n "$ZH" && "$ZH" != "$ZOOKEEPER_HOME" ]]; then
- echo "ZOOKEEPER_HOME in your shell env '$ZH' needs to match your uno uno.conf '$ZOOKEEPER_HOME'"
- env_error
- fi
- if [[ -n "$SH" && "$SH" != "$SPARK_HOME" ]]; then
- echo "SPARK_HOME in your shell env '$SH' needs to match your uno uno.conf '$SPARK_HOME'"
- env_error
- fi
- if [[ -n "$AH" && "$AH" != "$ACCUMULO_HOME" ]]; then
- echo "ACCUMULO_HOME in your shell env '$AH' needs to match your uno uno.conf '$ACCUMULO_HOME'"
- env_error
- fi
- if [[ -n "$FH" && "$FH" != "$FLUO_HOME" ]]; then
- echo "FLUO_HOME in your shell env '$FH' needs to match your uno uno.conf '$FLUO_HOME'"
- env_error
- fi
+ [[ -n "$HH" && "$HH" != "$HADOOP_HOME" ]] && env_error 'HADOOP_HOME' "$HH" "$HADOOP_HOME"
+ [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]] && env_error 'HADOOP_CONF_DIR' "$HC" "$HADOOP_CONF_DIR"
+ [[ -n "$ZH" && "$ZH" != "$ZOOKEEPER_HOME" ]] && env_error 'ZOOKEEPER_HOME' "$ZH" "$ZOOKEEPER_HOME"
+ [[ -n "$SH" && "$SH" != "$SPARK_HOME" ]] && env_error 'SPARK_HOME' "$SH" "$SPARK_HOME"
+ [[ -n "$AH" && "$AH" != "$ACCUMULO_HOME" ]] && env_error 'ACCUMULO_HOME' "$AH" "$ACCUMULO_HOME"
+ [[ -n "$FH" && "$FH" != "$FLUO_HOME" ]] && env_error 'FLUO_HOME' "$FH" "$FLUO_HOME"
fi
# Confirm that env variables were set correctly
@@ -105,7 +80,7 @@ if [[ ! -d "$INSTALL" ]]; then
fi
if [[ -z "$JAVA_HOME" || ! -d "$JAVA_HOME" ]]; then
- echo "JAVA_HOME must be set in your shell to a valid directory. Currently, JAVA_HOME=$JAVA_HOME"
+ echo "JAVA_HOME must be set in your shell to a valid directory. Currently, JAVA_HOME=$JAVA_HOME"
exit 1
fi
@@ -131,15 +106,15 @@ fi
: "${HADOOP_LOG_DIR:?"HADOOP_LOG_DIR is not set in uno.conf"}"
: "${ZOO_LOG_DIR:?"ZOO_LOG_DIR is not set in uno.conf"}"
-if [[ -z "$HADOOP_HASH" ]]; then
+if [[ -z $HADOOP_HASH ]]; then
echo "HADOOP_HASH is not set. Set it for your version in 'conf/checksums' or uno.conf"
exit 1
fi
-if [[ -z "$ZOOKEEPER_HASH" ]]; then
+if [[ -z $ZOOKEEPER_HASH ]]; then
echo "ZOOKEEPER_HASH is not set. Set it for your version in 'conf/checksums' or uno.conf"
exit 1
fi
-if [[ -z "$ACCUMULO_HASH" && "$ACCUMULO_VERSION" != *"SNAPSHOT"* ]]; then
+if [[ -z $ACCUMULO_HASH && ! $ACCUMULO_VERSION =~ SNAPSHOT ]]; then
echo "ACCUMULO_HASH is not set. Set it for your version in 'conf/checksums' or uno.conf"
exit 1
fi
@@ -147,8 +122,10 @@ fi
hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
hash sed 2>/dev/null || { echo >&2 "sed must be installed & on PATH. Aborting."; exit 1; }
-if [[ "$OSTYPE" == "darwin"* ]]; then
+if [[ $OSTYPE =~ ^darwin ]]; then
export SED="sed -i .bak"
else
export SED="sed -i"
fi
+
+# load-env.sh
diff --git a/bin/impl/print-env.sh b/bin/impl/print-env.sh
deleted file mode 100755
index 32419ba..0000000
--- a/bin/impl/print-env.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-if [[ -z "$1" || "$1" == "--vars" ]]; then
- echo "export HADOOP_HOME=\"$HADOOP_HOME\""
- if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
- echo "export HADOOP_PREFIX=\"$HADOOP_HOME\""
- fi
- echo "export HADOOP_CONF_DIR=\"$HADOOP_CONF_DIR\""
- echo "export ZOOKEEPER_HOME=\"$ZOOKEEPER_HOME\""
- echo "export SPARK_HOME=\"$SPARK_HOME\""
- echo "export ACCUMULO_HOME=\"$ACCUMULO_HOME\""
- echo "export FLUO_HOME=\"$FLUO_HOME\""
- echo "export FLUO_YARN_HOME=\"$FLUO_YARN_HOME\""
-fi
-
-if [[ -z "$1" || "$1" == "--paths" ]]; then
- echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_HOME/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
- if [[ -d "$SPARK_HOME" ]]; then
- echo -n ":$SPARK_HOME/bin"
- fi
- if [[ -d "$FLUO_HOME" ]]; then
- echo -n ":$FLUO_HOME/bin"
- fi
- if [[ -d "$FLUO_YARN_HOME" ]]; then
- echo -n ":$FLUO_YARN_HOME/bin"
- fi
- if [[ -d "$INFLUXDB_HOME" ]]; then
- echo -n ":$INFLUXDB_HOME/bin"
- fi
- if [[ -d "$GRAFANA_HOME" ]]; then
- echo -n ":$GRAFANA_HOME/bin"
- fi
- echo '"'
-fi
diff --git a/bin/impl/run.sh b/bin/impl/run.sh
deleted file mode 100755
index f05fcb7..0000000
--- a/bin/impl/run.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source "$UNO_HOME"/bin/impl/util.sh
-
-[[ -n $LOGS_DIR ]] && rm -f "$LOGS_DIR"/setup/*.{out,err}
-echo "Running $1 (detailed logs in $LOGS_DIR/setup)..."
-save_console_fd
-case "$1" in
- hadoop|zookeeper)
- run_component "$1"
- ;;
- accumulo|fluo|fluo-yarn)
- run_component "$1" "$2"
- ;;
- *)
- echo "Usage: uno run <component> [--no-deps]"
- echo -e "\nPossible components:\n"
- echo " accumulo Runs Apache Accumulo and its dependencies (Hadoop & ZooKeeper)"
- echo " hadoop Runs Apache Hadoop"
- echo " fluo Runs Apache Fluo and its dependencies (Accumulo, Hadoop, & ZooKeeper)"
- echo " fluo-yarn Runs Apache Fluo YARN and its dependencies (Fluo, Accumulo, Hadoop, & ZooKeeper)"
- echo -e " zookeeper Runs Apache ZooKeeper\n"
- echo "Options:"
- echo " --no-deps Dependencies will be setup unless this option is specified. Only works for fluo & accumulo components."
- exit 1
- ;;
-esac
-
-if [[ "$?" == 0 ]]; then
- echo "Run complete."
-else
- echo "Run failed!"
- false
-fi
diff --git a/bin/impl/run/accumulo.sh b/bin/impl/run/accumulo.sh
index 064a862..ddf0410 100755
--- a/bin/impl/run/accumulo.sh
+++ b/bin/impl/run/accumulo.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f accumulo.start
@@ -23,10 +24,7 @@ pkill -f accumulo.start
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ $1 != "--no-deps" ]]; then
- run_component hadoop
- run_component zookeeper
-fi
+[[ $1 != '--no-deps' ]] && run_component hadoop && run_component zookeeper
"$HADOOP_HOME"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
"$ACCUMULO_HOME"/bin/accumulo init --clear-instance-name --instance-name "$ACCUMULO_INSTANCE" --password "$ACCUMULO_PASSWORD"
@@ -39,3 +37,6 @@ fi
print_to_console "Apache Accumulo $ACCUMULO_VERSION is running"
print_to_console " * Accumulo Monitor: http://localhost:9995/"
print_to_console " * view logs at $ACCUMULO_LOG_DIR"
+
+true
+# accumulo.sh
diff --git a/bin/impl/run/fluo-yarn.sh b/bin/impl/run/fluo-yarn.sh
index 759cb89..0d5c5a7 100755
--- a/bin/impl/run/fluo-yarn.sh
+++ b/bin/impl/run/fluo-yarn.sh
@@ -15,13 +15,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
# stop if any command fails
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ $1 != "--no-deps" ]]; then
- run_component fluo
-fi
+[[ $1 != '--no-deps' ]] && run_component fluo
+true
+# fluo-yarn.sh
diff --git a/bin/impl/run/fluo.sh b/bin/impl/run/fluo.sh
index e1106b4..95feb3a 100755
--- a/bin/impl/run/fluo.sh
+++ b/bin/impl/run/fluo.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f fluo.yarn
@@ -25,6 +26,7 @@ pkill -f twill.launcher
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-if [[ $2 != "--no-deps" ]]; then
- run_component accumulo
-fi
+[[ $2 != '--no-deps' ]] && run_component accumulo
+
+true
+# fluo.sh
diff --git a/bin/impl/run/hadoop.sh b/bin/impl/run/hadoop.sh
index d0791f6..17be5d4 100755
--- a/bin/impl/run/hadoop.sh
+++ b/bin/impl/run/hadoop.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f hadoop.hdfs
@@ -38,3 +39,6 @@ print_to_console "Apache Hadoop $HADOOP_VERSION is running"
print_to_console " * NameNode status: http://localhost:$namenode_port/"
print_to_console " * ResourceManager status: http://localhost:8088/"
print_to_console " * view logs at $HADOOP_LOG_DIR"
+
+true
+# hadoop.sh
diff --git a/bin/impl/run/zookeeper.sh b/bin/impl/run/zookeeper.sh
index 6c9d607..a8ea88a 100755
--- a/bin/impl/run/zookeeper.sh
+++ b/bin/impl/run/zookeeper.sh
@@ -15,6 +15,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# shellcheck source=bin/impl/util.sh
source "$UNO_HOME"/bin/impl/util.sh
pkill -f QuorumPeerMain
@@ -23,11 +24,14 @@ pkill -f QuorumPeerMain
set -e
trap 'echo "[ERROR] Error occurred at $BASH_SOURCE:$LINENO command: $BASH_COMMAND"' ERR
-rm -f "$ZOO_LOG_DIR"/*
-rm -rf "$DATA_DIR"/zookeeper
+rm -f "${ZOO_LOG_DIR:?}"/*
+rm -rf "${DATA_DIR:?}"/zookeeper
mkdir -p "$ZOO_LOG_DIR"
"$ZOOKEEPER_HOME"/bin/zkServer.sh start
print_to_console "Apache ZooKeeper $ZOOKEEPER_VERSION is running"
print_to_console " * view logs at $ZOO_LOG_DIR"
+
+true
+# zookeeper.sh
diff --git a/bin/impl/setup.sh b/bin/impl/setup.sh
deleted file mode 100755
index b84318c..0000000
--- a/bin/impl/setup.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source "$UNO_HOME"/bin/impl/util.sh
-
-[[ -n $LOGS_DIR ]] && rm -f "$LOGS_DIR"/setup/*.{out,err}
-echo "Beginning setup (detailed logs in $LOGS_DIR/setup)..."
-save_console_fd
-
-case "$1" in
- accumulo|fluo)
- setup_component "$1" "$2"
- ;;
- hadoop|zookeeper|fluo-yarn)
- setup_component "$1"
- ;;
- *)
- echo "Usage: uno setup <component> [--no-deps]"
- echo -e "\nPossible components:\n"
- echo " accumulo Sets up Apache Accumulo and its dependencies (Hadoop & ZooKeeper)"
- echo " hadoop Sets up Apache Hadoop"
- echo " fluo Sets up Apache Fluo and its dependencies (Accumulo, Hadoop, & ZooKeeper)"
- echo " fluo-yarn Sets up Apache Fluo YARN and its dependencies (Fluo, Accumulo, Hadoop, & ZooKeeper)"
- echo -e " zookeeper Sets up Apache ZooKeeper\n"
- echo "Options:"
- echo " --no-deps Dependencies will be setup unless this option is specified. Only works for fluo & accumulo components."
- exit 1
- ;;
-esac
-
-if [[ "$?" == 0 ]]; then
- echo "Setup complete."
-else
- echo "Setup failed!"
- false
-fi
diff --git a/bin/impl/start.sh b/bin/impl/start.sh
deleted file mode 100755
index 969126a..0000000
--- a/bin/impl/start.sh
+++ /dev/null
@@ -1,96 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source "$UNO_HOME"/bin/impl/util.sh
-
-case "$1" in
- accumulo)
- check_dirs ACCUMULO_HOME
-
- if [[ "$2" != "--no-deps" ]]; then
- check_dirs ZOOKEEPER_HOME HADOOP_HOME
-
- tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$ZOOKEEPER_HOME"/bin/zkServer.sh start
- else echo "ZooKeeper already running at: $tmp"
- fi
-
- tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$HADOOP_HOME"/sbin/start-dfs.sh
- else echo "Hadoop DFS already running at: $tmp"
- fi
-
- tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$HADOOP_HOME"/sbin/start-yarn.sh
- else echo "Hadoop Yarn already running at: $tmp"
- fi
- fi
-
- tmp="$(pgrep -f accumulo\\.start | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
- "$ACCUMULO_HOME"/bin/start-all.sh
- else
- "$ACCUMULO_HOME"/bin/accumulo-cluster start
- fi
- else echo "Accumulo already running at: $tmp"
- fi
- ;;
- hadoop)
- check_dirs HADOOP_HOME
-
- tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$HADOOP_HOME"/sbin/start-dfs.sh
- else echo "Hadoop DFS already running at: $tmp"
- fi
-
- tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$HADOOP_HOME"/sbin/start-yarn.sh
- else echo "Hadoop Yarn already running at: $tmp"
- fi
- ;;
- zookeeper)
- check_dirs ZOOKEEPER_HOME
-
- tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
- if [[ -z "$tmp" ]]; then
- "$ZOOKEEPER_HOME"/bin/zkServer.sh start
- else echo "ZooKeeper already running at: $tmp"
- fi
- ;;
-
- # NYI
- # fluo)
- #
- # ;;
-
- *)
- echo "Usage: uno start <component> [--no-deps]"
- echo -e "\nPossible components:\n"
- echo " accumulo Start Apache Accumulo plus dependencies: Hadoop, ZooKeeper"
- echo " hadoop Start Apache Hadoop"
- echo " zookeeper Start Apache ZooKeeper"
- echo "Options:"
- echo " --no-deps Dependencies will start unless this option is specified. Only works for accumulo component."
- exit 1
- ;;
- esac
diff --git a/bin/impl/status.sh b/bin/impl/status.sh
deleted file mode 100755
index 538b461..0000000
--- a/bin/impl/status.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-
-atmp="$(ps -ef | grep accumulo\\.start | awk '{print $NF "(" $2 ")"}' | tr '\n' ' ')"
-htmp="$(ps -ef | grep -e hadoop\\.hdfs -e hadoop\\.yarn | tr '.' ' ' | awk '{print $NF "(" $2 ")"}' | tr '\n' ' ')"
-ztmp="$(pgrep -f QuorumPeerMain | awk '{print "zoo(" $1 ")"}' | tr '\n' ' ')"
-
-if [[ "$atmp" || "$ztmp" || "$htmp" ]]; then
- if [[ "$atmp" ]]; then
- echo "Accumulo processes running: $atmp"
- fi
-
- if [[ "$ztmp" ]]; then
- echo "Zookeeper processes running: $ztmp "
- fi
-
- if [[ "$htmp" ]]; then
- echo "Hadoop processes running: $htmp"
- fi
-
-else
- echo "No components runnning."
-fi
-
-
-
-
diff --git a/bin/impl/stop.sh b/bin/impl/stop.sh
deleted file mode 100755
index aa85ebb..0000000
--- a/bin/impl/stop.sh
+++ /dev/null
@@ -1,81 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source "$UNO_HOME"/bin/impl/util.sh
-
-case "$1" in
- accumulo)
- check_dirs ACCUMULO_HOME
-
- if [[ ! -z "$(pgrep -f accumulo\\.start)" ]]; then
- if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
- "$ACCUMULO_HOME"/bin/stop-all.sh
- else
- "$ACCUMULO_HOME"/bin/accumulo-cluster stop
- fi
- fi
-
- if [[ "$2" != "--no-deps" ]]; then
- check_dirs ZOOKEEPER_HOME HADOOP_HOME
-
- if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
- "$HADOOP_HOME"/sbin/stop-yarn.sh
- fi
-
- if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
- "$HADOOP_HOME"/sbin/stop-dfs.sh
- fi
-
- if [[ ! -z "$(pgrep -f QuorumPeerMain)" ]]; then
- "$ZOOKEEPER_HOME"/bin/zkServer.sh stop
- fi
- fi
- ;;
- hadoop)
- check_dirs HADOOP_HOME
-
- if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
- "$HADOOP_HOME"/sbin/stop-yarn.sh
- fi
-
- if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
- "$HADOOP_HOME"/sbin/stop-dfs.sh
- fi
- ;;
- zookeeper)
- check_dirs ZOOKEEPER_HOME
-
- if [[ ! -z "$(pgrep -f QuorumPeerMain)" ]]; then
- "$ZOOKEEPER_HOME"/bin/zkServer.sh stop
- fi
- ;;
-
- # NYI
- # fluo)
- #
- # ;;
- *)
- echo "Usage: uno stop <component> [--no-deps]"
- echo -e "\nPossible components:\n"
- echo " accumulo Stop Apache Accumulo plus dependencies: Hadoop, ZooKeeper"
- echo " hadoop Stop Apache Hadoop"
- echo " zookeeper Stop Apache ZooKeeper"
- echo "Options:"
- echo " --no-deps Dependencies will stop unless this option is specified. Only works for accumulo component."
- exit 1
- ;;
- esac
diff --git a/bin/impl/util.sh b/bin/impl/util.sh
index 7556e52..5f0528e 100755
--- a/bin/impl/util.sh
+++ b/bin/impl/util.sh
@@ -16,79 +16,73 @@
# limitations under the License.
function verify_exist_hash() {
- tarball=$1
+ local tarball=$1 expected_hash actual_hash hash_cmd
expected_hash=$(echo "${2// /}" | tr '[:upper:]' '[:lower:]')
- if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
+ if [[ ! -f $DOWNLOADS/$tarball ]]; then
print_to_console "The tarball $tarball does not exist in downloads/"
- exit 1
+ return 1
fi
- local HASH_CMD
case "${#expected_hash}" in
- 32) HASH_CMD='md5sum' ;;
- 40) HASH_CMD='shasum -a 1' ;;
- 64) HASH_CMD='shasum -a 256' ;;
- 128) HASH_CMD='shasum -a 512' ;;
+ 32) hash_cmd='md5sum' ;;
+ 40) hash_cmd='shasum -a 1' ;;
+ 64) hash_cmd='shasum -a 256' ;;
+ 128) hash_cmd='shasum -a 512' ;;
*)
print_to_console "Expected checksum ($expected_hash) of $tarball is not an MD5, SHA1, SHA256, or SHA512 sum"
- exit 1
+ return 1
;;
esac
- actual_hash=$($HASH_CMD "$DOWNLOADS/$tarball" | awk '{print $1}')
+ actual_hash=$($hash_cmd "$DOWNLOADS/$tarball" | awk '{print $1}')
- if [[ "$actual_hash" != "$expected_hash" ]]; then
+ if [[ $actual_hash != "$expected_hash" ]]; then
print_to_console "The actual checksum ($actual_hash) of $tarball does not match the expected checksum ($expected_hash)"
- exit 1
+ return 1
fi
}
# Takes directory variables as arguments
function check_dirs() {
for arg in "$@"; do
- if [[ ! -d "${!arg}" ]]; then
+ if [[ ! -d ${!arg} ]]; then
print_to_console "$arg=${!arg} is not a valid directory. Please make sure it exists"
- exit 1
+ return 1
fi
done
}
function post_install_plugins() {
- for plugin in $POST_INSTALL_PLUGINS
- do
+ for plugin in $POST_INSTALL_PLUGINS; do
echo "Executing post install plugin: $plugin"
plugin_script="${UNO_HOME}/plugins/${plugin}.sh"
- if [[ ! -f "$plugin_script" ]]; then
+ if [[ ! -f $plugin_script ]]; then
echo "Plugin does not exist: $plugin_script"
- exit 1
+ return 1
fi
- $plugin_script
- done
+ "$plugin_script" || return 1
+ done
}
function post_run_plugins() {
- for plugin in $POST_RUN_PLUGINS
- do
+ for plugin in $POST_RUN_PLUGINS; do
echo "Executing post run plugin: $plugin"
plugin_script="${UNO_HOME}/plugins/${plugin}.sh"
if [[ ! -f "$plugin_script" ]]; then
echo "Plugin does not exist: $plugin_script"
- exit 1
+ return 1
fi
- $plugin_script
- done
+ "$plugin_script" || return 1
+ done
}
function install_component() {
local component; component=$(echo "$1" | tr '[:upper:] ' '[:lower:]-')
shift
- "$UNO_HOME/bin/impl/install/$component.sh" "$@"
+ "$UNO_HOME/bin/impl/install/$component.sh" "$@" || return 1
case "$component" in
- accumulo|fluo)
- post_install_plugins
- ;;
- *)
- ;;
+ accumulo|fluo) post_install_plugins ;;
+ *) ;;
esac
}
@@ -97,23 +91,19 @@ function run_component() {
local logs; logs="$LOGS_DIR/setup"
mkdir -p "$logs"
shift
- "$UNO_HOME/bin/impl/run/$component.sh" "$component" "$@" 1>"$logs/${component}.out" 2>"$logs/${component}.err"
+ "$UNO_HOME/bin/impl/run/$component.sh" "$component" "$@" 1>"$logs/${component}.out" 2>"$logs/${component}.err" || return 1
case "$component" in
- accumulo|fluo)
- post_run_plugins
- ;;
- *)
- ;;
+ accumulo|fluo) post_run_plugins ;;
+ *) ;;
esac
}
function setup_component() {
- install_component $1
- run_component $1
+ install_component "$@" && run_component "$@"
}
function save_console_fd {
- if [[ -z "$UNO_CONSOLE_FD" && "$OSTYPE" != "darwin"* ]]; then
+ if [[ -z $UNO_CONSOLE_FD && ! $OSTYPE =~ ^darwin ]]; then
# Allocate an unused file descriptor and make it dup stdout
# https://stackoverflow.com/a/41620630/7298689
exec {UNO_CONSOLE_FD}>&1
@@ -122,7 +112,7 @@ function save_console_fd {
}
function print_to_console {
- if [[ -z "$UNO_CONSOLE_FD" ]]; then
+ if [[ -z $UNO_CONSOLE_FD ]]; then
echo "$@"
else
echo "$@" >&${UNO_CONSOLE_FD}
@@ -130,30 +120,39 @@ function print_to_console {
}
function download_tarball() {
- local url_prefix=$1
- local tarball=$2
- local expected_hash=$3
-
+ local url_prefix=$1 tarball=$2 expected_hash=$3
wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
- verify_exist_hash "$tarball" "$expected_hash"
+ verify_exist_hash "$tarball" "$expected_hash" || return 1
echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
}
function download_apache() {
- local url_prefix=$1
- local tarball=$2
- local expected_hash=$3
-
- if [ -n "$apache_mirror" ]; then
- wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
- fi
-
+ local url_prefix=$1 tarball=$2 expected_hash=$3
+ [[ -n "${apache_mirror:-}" ]] && wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
echo "Downloading $tarball from Apache archive"
wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
fi
-
- verify_exist_hash "$tarball" "$expected_hash"
+ verify_exist_hash "$tarball" "$expected_hash" || return 1
echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
}
+function print_cmd_usage() {
+ cat <<EOF
+Usage: uno $1 <component> [--no-deps]
+
+Possible components:
+
+ accumulo $2 Apache Accumulo and its dependencies (Hadoop & ZooKeeper)
+ hadoop $2 Apache Hadoop
+ fluo $2 Apache Fluo and its dependencies (Accumulo, Hadoop, & ZooKeeper)
+ fluo-yarn $2 Apache Fluo YARN and its dependencies (Fluo, Accumulo, Hadoop, & ZooKeeper)
+ zookeeper $2 Apache ZooKeeper
+
+Options:
+ --no-deps Dependencies will be setup unless this option is specified.
+ Only works for fluo & accumulo components.
+EOF
+}
+
+# util.sh
diff --git a/bin/impl/version.sh b/bin/impl/version.sh
deleted file mode 100755
index 1235704..0000000
--- a/bin/impl/version.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#! /usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-case "$1" in
-hadoop)
- echo -n "$HADOOP_VERSION"
- ;;
-zookeeper)
- echo -n "$ZOOKEEPER_VERSION"
- ;;
-accumulo)
- echo -n "$ACCUMULO_VERSION"
- ;;
-fluo)
- echo -n "$FLUO_VERSION"
- ;;
-fluo-yarn)
- echo -n "$FLUO_YARN_VERSION"
- ;;
-spark)
- echo -n "$SPARK_VERSION"
- ;;
-influxdb)
- echo -n "$INFLUXDB_VERSION"
- ;;
-grafana)
- echo -n "$GRAFANA_VERSION"
- ;;
-*)
- echo "You must specify a valid depedency (i.e hadoop, zookeeper, accumulo, etc)"
- exit 1
-esac
diff --git a/bin/uno b/bin/uno
index 095d091..6a510aa 100755
--- a/bin/uno
+++ b/bin/uno
@@ -17,89 +17,33 @@
# Start: Resolve Script Directory
SOURCE="${BASH_SOURCE[0]}"
-while [[ -h "$SOURCE" ]]; do # resolve $SOURCE until the file is no longer a symlink
+# resolve $SOURCE until the file is no longer a symlink
+while [[ -h "$SOURCE" ]]; do
bin="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
- [[ $SOURCE != /* ]] && SOURCE="$bin/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+ # if $SOURCE was a relative symlink, we need to resolve it relative to the
+ # path where the symlink file was located
+ [[ $SOURCE != /* ]] && SOURCE="$bin/$SOURCE"
done
bin="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
# Stop: Resolve Script Directory
-source "$bin"/impl/load-env.sh "$1"
-source "$UNO_HOME"/bin/impl/util.sh
+uno_cmd=$1
+shift
-case "$1" in
- ashell)
- check_dirs ACCUMULO_HOME
- "$ACCUMULO_HOME"/bin/accumulo shell -u "$ACCUMULO_USER" -p "$ACCUMULO_PASSWORD" "${@:2}"
- ;;
- zk)
- check_dirs ZOOKEEPER_HOME
- "$ZOOKEEPER_HOME"/bin/zkCli.sh
- ;;
- env)
- "$bin"/impl/print-env.sh "${@:2}"
- ;;
- fetch)
- hash mvn 2>/dev/null || { echo >&2 "Maven must be installed & on PATH. Aborting."; exit 1; }
- hash wget 2>/dev/null || { echo >&2 "wget must be installed & on PATH. Aborting."; exit 1; }
- if [[ "$2" == "all" ]]; then
- "$bin"/impl/fetch.sh fluo
- else
- "$bin"/impl/fetch.sh "$2" "$3"
- fi
- ;;
- install)
- "$bin"/impl/install.sh "${@:2}"
- ;;
- kill)
- "$bin"/impl/kill.sh "${@:2}"
- ;;
- run)
- "$bin"/impl/run.sh "${@:2}"
- ;;
- setup)
- "$bin"/impl/setup.sh "${@:2}"
- ;;
- start)
- "$bin"/impl/start.sh "${@:2}"
- ;;
- stop)
- "$bin"/impl/stop.sh "${@:2}"
- ;;
- version)
- "$bin"/impl/version.sh "${@:2}"
- ;;
-
- status)
- "$bin"/impl/status.sh "${@:2}"
- ;;
-
- wipe)
- "$bin"/impl/kill.sh
- if [[ -d "$INSTALL" ]]; then
- echo "removing $INSTALL"
- rm -rf "$INSTALL"
- fi
+# shellcheck source=bin/impl/load-env.sh
+source "$bin"/impl/load-env.sh "$uno_cmd"
+# shellcheck source=bin/impl/commands.sh
+source "$UNO_HOME"/bin/impl/commands.sh
+
+case "$uno_cmd" in
+ ashell|env|fetch|install|kill|run|setup|start|status|stop|version|wipe|zk)
+ "uno_${uno_cmd}_main" "$@"
;;
*)
- echo -e "Usage: uno <command> (<argument>)\n"
- echo -e "Possible commands:\n"
- echo " fetch <component> Fetches binary tarballs of component and it dependencies by either building or downloading"
- echo " the tarball (as configured by uno.conf). Run 'uno fetch all' to fetch all binary tarballs."
- echo " install <component> Installs component and its dependencies (clearing any existing data)"
- echo " run <component> Runs component and its dependencies (clearing any existing data)"
- echo " setup <component> Installs and runs component and its dependencies (clearing any existing data)"
- echo " start <component> Start ZooKeeper, Hadoop, Accumulo, if not running."
- echo " stop <component> Stop Accumulo, Hadoop, ZooKeeper, if running."
- echo " status Check if Accumulo, Hadoop, or Zookeeper are running."
- echo " kill Kills all processes"
- echo " ashell Runs the Accumulo shell"
- echo " zk Connects to ZooKeeper CLI"
- echo " env Prints out shell configuration for PATH and common environment variables."
- echo " Add '--paths' or '--vars' command to limit what is printed."
- echo " version <dep> Prints out configured version for dependency"
- echo -e " wipe Kills all processes and clears install directory\n"
- echo "Possible components: accumulo, fluo, fluo-yarn, hadoop, zookeeper"
+ uno_help_main "$@"
exit 1
+ ;;
esac
+
+# uno
diff --git a/conf/checksums b/conf/checksums
index a0fc307..ad205ca 100644
--- a/conf/checksums
+++ b/conf/checksums
@@ -11,14 +11,19 @@ accumulo:1.7.3:294f2f1f3fbc164b68e80cecd5a6ce5c245df804fb35ae5e03ab1c86bc9480da
hadoop:3.3.0:9ac5a5a8d29de4d2edfb5e554c178b04863375c5644d6fea1f6464ab4a7e22a50a6c43253ea348edbd114fc534dcde5bdd2826007e24b2a6b0ce0d704c5b4f5b
hadoop:3.2.1:d62709c3d7144fcaafc60e18d0fa03d7d477cc813e45526f3646030cd87dbf010aeccf3f4ce795b57b08d2884b3a55f91fe9d74ac144992d2dfe444a4bbf34ee
hadoop:3.2.0:79676a7dadbc4740cb2ff4ae7af75f5b0a45b4748f217f4179ab64827b840eef58c63b9132260c5682cb28b6d12a27d4a4d09a15173aca158fb1fc3cdb0b1609
+hadoop:3.1.3:c790711a61e9052a7d9c02d97b1d5acbe3d1cc2cd7045bb387791cc5321e97e27edf118d3b4b319cc1538c9493d8f4fbbef4dda5ef8996157438d9db9ba2cfdb
hadoop:3.1.2:0e0ee817c89b3c4eb761eca7f16640742a83b0e99b6fda26c1bee2baabedad93aab86e252bf5f1e2381c6d464bc4003d10c7cc0f61b2062f4c59732ca24d1bd9
hadoop:3.1.1:0821685c2f77710f189cf6a37309cd6ba35b63432bae19f7b3db05fdbdd1d375d8333e47461d82762d5f7f4133c6b33216689a6403d7dff3f8f41dcbe5477030
hadoop:3.1.0:8c620d1c82cc04629b7ada90ba0691c734196295e0103d74569de1f29e914327c281c0c9f1e48881df3d567f6482c288bd493a16257c801c82247f5eb5d7b1e4
hadoop:3.0.3:db96e2c0d0d5352d8984892dfac4e27c0e682d98a497b7e04ee97c3e2019277a
hadoop:3.0.2:0d507aa71007b2685e292343c11c2cb90a92ea7625446b57d1fb47c5721e2f82
+hadoop:2.10.0:76592efe09cd9887adb9e058c43d28858b19bcbc829ea1de3d7f7d2e54e4b37f415984dcac5f401deb9c30e69e85b7f3ac29785ac6eb17cd15b7664c731bcd85
+hadoop:2.9.2:3d2023c46b1156c1b102461ad08cbc17c8cc53004eae95dab40a1f659839f28a
hadoop:2.9.0:8d48666f29f9ade6ed2762b7a9edab177bad2c57396f43d0ffd6a269d54f6fe1
+hadoop:2.8.5:f9c726df693ce2daa4107886f603270d66e7257f77a92c9886502d6cd4a884a4
hadoop:2.8.4:63007792ecaf566aa8b97779db22805461ff3542fd18f14a60b3bcca1c6831bdeb6c9bb6d59596914fc6cc92c2049ce183e29c41aa10a97f5193fd3284a47acb
hadoop:2.8.3:e8bf9a53337b1dca3b152b0a5b5e277dc734e76520543e525c301a050bb27eae
+hadoop:2.7.7:d129d08a2c9dafec32855a376cbd2ab90c6a42790898cabbac6be4d29f9c2026
hadoop:2.7.6:f2327ea93f4bc5a5d7150dee8e0ede196d3a77ff8526a7dd05a48a09aae25669
hadoop:2.7.5:0bfc4d9b04be919be2fdf36f67fa3b4526cdbd406c512a7a1f5f1b715661f831
hadoop:2.6.5:001ad18d4b6d0fe542b15ddadba2d092bc97df1c4d2d797381c8d12887691898
diff --git a/conf/uno.conf b/conf/uno.conf
index 1104a40..58c3307 100644
--- a/conf/uno.conf
+++ b/conf/uno.conf
@@ -13,11 +13,12 @@ export FLUO_YARN_VERSION=${FLUO_YARN_VERSION:-1.0.0}
# --------------
# Hashes below match default versions above. If you change a version above,
# you must also change the hash below.
-export HADOOP_HASH=$(grep -F hadoop:${HADOOP_VERSION}: $UNO_HOME/conf/checksums | cut -d : -f 3)
-export ZOOKEEPER_HASH=$(grep -F zookeeper:${ZOOKEEPER_VERSION}: $UNO_HOME/conf/checksums | cut -d : -f 3)
-export ACCUMULO_HASH=$(grep -F accumulo:${ACCUMULO_VERSION}: $UNO_HOME/conf/checksums | cut -d : -f 3)
-export FLUO_HASH=037f89cd2bfdaf76a1368256c52de46d6b9a85c9c1bfc776ec4447d02c813fb2
-export FLUO_YARN_HASH=c6220d35cf23127272f3b5638c44586504dc17a46f5beecdfee5027b5ff874b0
+HADOOP_HASH=$(grep -F hadoop:"${HADOOP_VERSION}": "${UNO_HOME:?}"/conf/checksums | cut -d : -f 3)
+ZOOKEEPER_HASH=$(grep -F zookeeper:"${ZOOKEEPER_VERSION}": "${UNO_HOME:?}"/conf/checksums | cut -d : -f 3)
+ACCUMULO_HASH=$(grep -F accumulo:"${ACCUMULO_VERSION}": "${UNO_HOME:?}"/conf/checksums | cut -d : -f 3)
+FLUO_HASH=037f89cd2bfdaf76a1368256c52de46d6b9a85c9c1bfc776ec4447d02c813fb2
+FLUO_YARN_HASH=c6220d35cf23127272f3b5638c44586504dc17a46f5beecdfee5027b5ff874b0
+export HADOOP_HASH ZOOKEEPER_HASH ACCUMULO_HASH FLUO_HASH FLUO_YARN_HASH
# Network configuration
# ---------------------
@@ -148,7 +149,8 @@ export POST_RUN_PLUGINS=""
export SPARK_VERSION=${SPARK_VERSION:-2.3.2}
export SPARK_HOME=$INSTALL/spark-${SPARK_VERSION}-bin-without-hadoop
export SPARK_TARBALL=spark-${SPARK_VERSION}-bin-without-hadoop.tgz
-export SPARK_HASH=$(grep -F spark:${SPARK_VERSION}: $UNO_HOME/conf/checksums | cut -d : -f 3)
+SPARK_HASH=$(grep -F spark:"${SPARK_VERSION}": "${UNO_HOME:?}"/conf/checksums | cut -d : -f 3)
+export SPARK_HASH
# Configuration for 'influxdb-metrics' plugin
# InfluxDB metrics can only be set up on Linux. Mac OS X is not supported.
export INFLUXDB_VERSION=0.9.4.2
diff --git a/contrib/run-shellcheck b/contrib/run-shellcheck
new file mode 100755
index 0000000..2935f62
--- /dev/null
+++ b/contrib/run-shellcheck
@@ -0,0 +1,12 @@
+#! /usr/bin/env bash
+
+set -e
+set -x
+
+cd "$(dirname "${BASH_SOURCE[0]}")/../"
+
+mapfile -t filestocheck < <(find bin/ -type f)
+for x in "${filestocheck[@]}"; do
+ shellcheck conf/uno.conf bin/impl/util.sh bin/impl/load-env.sh bin/impl/commands.sh "$x"
+done
+