Posted to commits@accumulo.apache.org by ct...@apache.org on 2022/10/14 13:31:02 UTC

[accumulo-testing] branch main updated: Standardize formatting of shell scripts (#245)

This is an automated email from the ASF dual-hosted git repository.

ctubbsii pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/accumulo-testing.git


The following commit(s) were added to refs/heads/main by this push:
     new c803776  Standardize formatting of shell scripts (#245)
c803776 is described below

commit c80377616e04fcb513f5d3be3dae7b73871e7561
Author: Christopher Tubbs <ct...@apache.org>
AuthorDate: Fri Oct 14 09:30:56 2022 -0400

    Standardize formatting of shell scripts (#245)
    
    * Standardize shebang lines for bash scripts
    * Run `shfmt -ln bash -l -d -i 2 -ci -s .` to format scripts
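
For reference, the flags in that `shfmt` invocation mean: -ln bash selects the bash dialect, -l lists files whose formatting differs, -d prints the differences, -i 2 indents with two spaces, -ci indents case branches, and -s simplifies the code. A minimal sketch of the two modes, assuming shfmt v3, where -w writes the result back instead of reporting it:

  # report: list offending files and show what would change
  shfmt -ln bash -l -d -i 2 -ci -s .
  # apply: rewrite the scripts in place with the same settings
  shfmt -ln bash -i 2 -ci -s -w .
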
---
 bin/agitator                                       |  41 +++++----
 bin/build                                          |   4 +-
 bin/cingest                                        |   6 +-
 bin/gcs                                            |   3 +-
 bin/mapred                                         |   3 +-
 bin/monitor                                        |   3 +-
 bin/performance                                    |  22 +++--
 bin/rwalk                                          |   4 +-
 conf/env.sh                                        |  23 +++--
 .../files/azure-format-lvm-data-disk.sh            |  27 +++---
 .../files/update-hosts-genders.sh                  |  10 +--
 src/main/docker/docker-entry                       |   8 +-
 test/automation/automateEC2.sh                     | 100 ++++++++++-----------
 test/automation/cluster_props.sh                   |   1 -
 test/merkle-replication/configure-replication.sh   |  57 ++++++------
 test/merkle-replication/ingest-data.sh             |  18 ++--
 test/merkle-replication/merkle-env.sh              |   1 +
 test/merkle-replication/verify-data.sh             |  60 ++++++-------
 test/stress/reader.sh                              |  12 +--
 test/stress/start-readers.sh                       |  12 +--
 test/stress/start-writers.sh                       |  12 +--
 test/stress/stop-readers.sh                        |  14 +--
 test/stress/stop-writers.sh                        |  14 +--
 test/stress/stress-env.sh                          |   4 +-
 test/stress/writer.sh                              |  12 +--
 test/test1/verify_test.sh                          |  10 +--
 test/test1/verify_test_2.sh                        |  10 +--
 test/test2/concurrent.sh                           |  84 +++++++++--------
 test/test3/bigrow.sh                               |  10 +--
 test/test4/bulk_import_test.sh                     |   1 -
 test/upgrade/upgrade_test.sh                       |   8 +-
 31 files changed, 300 insertions(+), 294 deletions(-)

diff --git a/bin/agitator b/bin/agitator
index 50157b5..23d2bbd 100755
--- a/bin/agitator
+++ b/bin/agitator
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-at_home=$( cd "$( dirname "$bin_dir" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+at_home=$(cd "$(dirname "$bin_dir")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
@@ -44,22 +44,23 @@ function start_app_agitator() {
   local max_kill=$5
   local start_cmd=$6
   local kill_cmd=$7
-  local hosts_array; read -a hosts_array < <(get_app_hosts "$app_name")
+  local hosts_array
+  read -a hosts_array < <(get_app_hosts "$app_name")
   local num_hosts=${#hosts_array[@]}
   local node_to_kill
   nodes_to_kill_array=()
   local T
   local ENV_VARS="ACCUMULO_HOME=$ACCUMULO_HOME ZOOKEEPER_HOME=$ZOOKEEPER_HOME HADOOP_HOME=$HADOOP_HOME JAVA_HOME=$JAVA_HOME"
 
-  if (( num_hosts == 0 )); then
+  if ((num_hosts == 0)); then
     echo "ERROR: No hosts were found in env for $app_name"
     exit 1
   fi
-  if (( max_kill > num_hosts )); then
+  if ((max_kill > num_hosts)); then
     echo "ERROR: Max kill $max_kill is greater than number of hosts $num_hosts"
     exit 1
   fi
-  if (( max_kill < min_kill )); then
+  if ((max_kill < min_kill)); then
     echo "ERROR: Max kill $max_kill is less than min kill $min_kill"
     exit 1
   fi
@@ -72,7 +73,7 @@ function start_app_agitator() {
     sleep $((kill_sleep_time * 60))
 
     T="$(date +'%Y%m%d %H:%M:%S')"
-    if ((max_kill == 1)) ; then
+    if ((max_kill == 1)); then
       node_to_kill=${hosts_array[0]}
       echo "$T Killing $app_name at $node_to_kill"
       ssh "$node_to_kill" "$kill_cmd"
@@ -100,7 +101,7 @@ function start_app_agitator() {
     sleep $((restart_sleep_time * 60))
 
     T="$(date +'%Y%m%d %H:%M:%S')"
-    if ((max_kill == 1)) ; then
+    if ((max_kill == 1)); then
       echo "$T Restarting $app_name at $node_to_kill"
       ssh "$node_to_kill" "bash -c '${ENV_VARS} $start_cmd'"
     else
@@ -120,7 +121,8 @@ function start_agitator() {
   elif hash parallel-ssh 2>/dev/null; then
     export PSSH=parallel-ssh
   else
-    echo >&2 "The agitator requires pssh/parallel-ssh to be installed. Aborting."; exit 1;
+    echo >&2 "The agitator requires pssh/parallel-ssh to be installed. Aborting."
+    exit 1
   fi
   ## read configuration into env variables
   read_cluster_conf
@@ -143,16 +145,16 @@ function start_agitator() {
     sudo -i -u "$AGTR_ACCUMULO_USER"
   fi
   echo "Starting manager and tserver agitation as $(whoami)"
-  start_app_agitator manager "$AGTR_MANAGER_KILL_SLEEP_TIME" "$AGTR_MANAGER_RESTART_SLEEP_TIME" 1 1 "$manager_start_cmd" "$manager_kill_cmd" > "$manager_log" 2>&1 &
-  start_app_agitator tserver "$AGTR_TSERVER_KILL_SLEEP_TIME" "$AGTR_TSERVER_RESTART_SLEEP_TIME" "$AGTR_TSERVER_MIN_KILL" "$AGTR_TSERVER_MAX_KILL" "$tserver_start_cmd" "$tserver_kill_cmd" > "$tserver_log" 2>&1 &
+  start_app_agitator manager "$AGTR_MANAGER_KILL_SLEEP_TIME" "$AGTR_MANAGER_RESTART_SLEEP_TIME" 1 1 "$manager_start_cmd" "$manager_kill_cmd" >"$manager_log" 2>&1 &
+  start_app_agitator tserver "$AGTR_TSERVER_KILL_SLEEP_TIME" "$AGTR_TSERVER_RESTART_SLEEP_TIME" "$AGTR_TSERVER_MIN_KILL" "$AGTR_TSERVER_MAX_KILL" "$tserver_start_cmd" "$tserver_kill_cmd" >"$tserver_log" 2>&1 &
 
   if [[ $AGITATOR_USER != "$AGTR_HDFS_USER" ]]; then
     sudo -i -u "$AGTR_HDFS_USER"
   fi
   echo "Running datanode agitator as $(whoami)"
-  start_app_agitator datanode "$AGTR_DATANODE_KILL_SLEEP_TIME" "$AGTR_DATANODE_RESTART_SLEEP_TIME" "$AGTR_DATANODE_MIN_KILL" "$AGTR_DATANODE_MAX_KILL" "$datanode_start_cmd" "$datanode_kill_cmd" > "${datanode_log}" 2>&1 &
+  start_app_agitator datanode "$AGTR_DATANODE_KILL_SLEEP_TIME" "$AGTR_DATANODE_RESTART_SLEEP_TIME" "$AGTR_DATANODE_MIN_KILL" "$AGTR_DATANODE_MAX_KILL" "$datanode_start_cmd" "$datanode_kill_cmd" >"${datanode_log}" 2>&1 &
 
-  if ${AGTR_HDFS:-false} ; then
+  if ${AGTR_HDFS:-false}; then
     agitator_log=${log_base}_hdfs-agitator
     sudo -u "$AGTR_HDFS_SUPERUSER" nohup "${at_home}/libexec/hdfs-agitator.pl" --sleep "${AGTR_HDFS_SLEEP_TIME}" --hdfs-cmd "${AGTR_HDFS_COMMAND}" --superuser "${AGTR_HDFS_SUPERUSER}" >"${agitator_log}.out" 2>"${agitator_log}.err" &
   fi
@@ -162,9 +164,11 @@ function stop_agitator() {
   [[ -n $AGITATOR_USER ]] || AGITATOR_USER=$(whoami)
   echo "Stopping all processes in the same process group as 'agitator' as user $AGITATOR_USER"
   ## get process ids of all agitator processes (return 1 if none found)
-  local agitator_pids=(); read -a agitator_pids < <(pgrep -f "agitator start") || return 1
+  local agitator_pids=()
+  read -a agitator_pids < <(pgrep -f "agitator start") || return 1
   ## get the group process ids of all agitator processes
-  local group_pids=(); read -a group_pids < <(ps -o pgid= -p "${agitator_pids[@]}")
+  local group_pids=()
+  read -a group_pids < <(ps -o pgid= -p "${agitator_pids[@]}")
   ## kill all processes in the process groups (should include agitators and their sleep processes)
   kill -- "${group_pids[@]/#/-}"
 }
@@ -180,7 +184,7 @@ function read_cluster_conf() {
   echo "Reading cluster config from ${conf}/cluster.yaml"
   trap 'rm -f "$CONFIG_FILE"' EXIT
   CONFIG_FILE=$(mktemp) || exit 1
-  accumulo org.apache.accumulo.core.conf.cluster.ClusterConfigParser "${conf}"/cluster.yaml > "$CONFIG_FILE" || parse_fail
+  accumulo org.apache.accumulo.core.conf.cluster.ClusterConfigParser "${conf}"/cluster.yaml >"$CONFIG_FILE" || parse_fail
   . "$CONFIG_FILE"
   rm -f "$CONFIG_FILE"
 
@@ -197,8 +201,8 @@ function read_cluster_conf() {
 # Given an app_name $1, return the space separated string of hosts through echo
 function get_app_hosts() {
   case "$1" in
-    manager)  echo -n "$MANAGER_HOSTS" ;;
-    tserver|datanode) echo -n "$TSERVER_HOSTS" ;;
+    manager) echo -n "$MANAGER_HOSTS" ;;
+    tserver | datanode) echo -n "$TSERVER_HOSTS" ;;
     *) return 1 ;;
   esac
 }
@@ -214,4 +218,5 @@ case "$1" in
     echo "ERROR: unknown command - $2"
     print_usage
     exit 1
+    ;;
 esac
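
The agitator hunks above also show the idiom of reading a command's output into a bash array with process substitution. Unlike piping into read, which would run read in a subshell and discard the array, `read -a arr < <(cmd)` keeps the array in the current shell. A standalone sketch, with a hypothetical get_app_hosts that simply emits a space-separated host list:

  # hypothetical stand-in for the real get_app_hosts in bin/agitator
  get_app_hosts() { echo -n "host1 host2 host3"; }

  read -r -a hosts_array < <(get_app_hosts manager)
  echo "found ${#hosts_array[@]} hosts: ${hosts_array[*]}"
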
diff --git a/bin/build b/bin/build
index e76599b..6fa80bc 100755
--- a/bin/build
+++ b/bin/build
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-at_home=$( cd "$( dirname "$bin_dir" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+at_home=$(cd "$(dirname "$bin_dir")" && pwd)
 
 env_sh="$at_home/conf/env.sh"
 echo "Using $env_sh for setup"
diff --git a/bin/cingest b/bin/cingest
index e579394..43328ea 100755
--- a/bin/cingest
+++ b/bin/cingest
@@ -18,7 +18,7 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
@@ -80,12 +80,13 @@ case "$1" in
     echo "Unknown application: $1"
     print_usage
     exit 1
+    ;;
 esac
 
 export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$CLASSPATH"
 
 case "$1" in
-  verify|moru|bulk)
+  verify | moru | bulk)
     if [ -n "$HADOOP_HOME" ]; then
       export HADOOP_USE_CLIENT_CLASSLOADER=true
       "$HADOOP_HOME"/bin/yarn jar "$TEST_JAR_PATH" "$ci_main" "${@:2}" "$TEST_PROPS" "$ACCUMULO_CLIENT_PROPS"
@@ -96,4 +97,5 @@ case "$1" in
     ;;
   *)
     java $JAVA_OPTS -Dlog4j.configurationFile="file:$TEST_LOG4J" "$ci_main" "${@:2}" "$TEST_PROPS" "$ACCUMULO_CLIENT_PROPS"
+    ;;
 esac
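
The case-statement churn in bin/cingest is shfmt's normalization: with -ci the branch patterns are indented one level inside case, alternate patterns get spaces around the | separator, and every branch, including the last one before esac, carries an explicit `;;` (bash treats that final `;;` as optional, so adding it changes nothing at runtime). The resulting shape, sketched with placeholder bodies:

  case "$1" in
    verify | moru | bulk)
      echo "runs under YARN"
      ;;
    *)
      echo "runs in a plain JVM"
      ;;
  esac
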
diff --git a/bin/gcs b/bin/gcs
index 7b7c9be..5227925 100755
--- a/bin/gcs
+++ b/bin/gcs
@@ -18,7 +18,7 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
@@ -60,6 +60,7 @@ case "$1" in
     echo "Unknown application: $1"
     print_usage
     exit 1
+    ;;
 esac
 
 export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$CLASSPATH"
diff --git a/bin/mapred b/bin/mapred
index 95b51aa..65442b8 100755
--- a/bin/mapred
+++ b/bin/mapred
@@ -18,7 +18,7 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
@@ -51,6 +51,7 @@ case "$1" in
     echo "Unknown application: $1"
     print_usage
     exit 1
+    ;;
 esac
 
 export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$CLASSPATH"
diff --git a/bin/monitor b/bin/monitor
index bdf4fd7..50c43a7 100755
--- a/bin/monitor
+++ b/bin/monitor
@@ -18,7 +18,7 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
@@ -48,6 +48,7 @@ case "$1" in
     echo "Unknown application: $1"
     print_usage
     exit 1
+    ;;
 esac
 
 export CLASSPATH="$TEST_JAR_PATH:$HADOOP_API_JAR:$HADOOP_RUNTIME_JAR:$CLASSPATH"
diff --git a/bin/performance b/bin/performance
index a526303..395bf9b 100755
--- a/bin/performance
+++ b/bin/performance
@@ -22,8 +22,8 @@
 # config and only relies on cluster-control.sh script to get all environmental
 # information.
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-at_home=$( cd "$( dirname "$bin_dir" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+at_home=$(cd "$(dirname "$bin_dir")" && pwd)
 at_version=2.1.0-SNAPSHOT
 
 function print_usage() {
@@ -39,7 +39,6 @@ Possible commands:
 EOF
 }
 
-
 function build_shade_jar() {
   at_shaded_jar="$at_home/target/accumulo-testing-shaded.jar"
   if [ ! -f "$at_shaded_jar" ]; then
@@ -55,7 +54,6 @@ if [ ! -f "$log4j_config" ]; then
   exit 1
 fi
 
-
 if [ ! -f "$at_home/conf/cluster-control.sh" ]; then
   echo "Could not find cluster-control.sh"
   exit 1
@@ -63,7 +61,7 @@ fi
 
 source "$at_home"/conf/cluster-control.sh
 export TEST_JAR_PATH="${at_home}/target/accumulo-testing-shaded.jar"
-if [[ ! -f "$TEST_JAR_PATH" ]]; then
+if [[ ! -f $TEST_JAR_PATH ]]; then
   build_shade_jar
 fi
 CP="$at_shaded_jar:$(get_hadoop_client)"
@@ -79,7 +77,7 @@ case "$1" in
     # don't start unless we can find the class provided
     found=false
     CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.ListTests | while read -r test_class; do
-      if [[ "$test_class" == "$3" ]]; then
+      if [[ $test_class == "$3" ]]; then
         found=true
       fi
     done
@@ -96,28 +94,28 @@ case "$1" in
         pt_tmp=$(mktemp -d -t accumulo_pt_XXXXXXX)
         setup_accumulo
         get_config_file accumulo.properties "$pt_tmp"
-        CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config"  ${perf_pkg}.MergeSiteConfig "$test_class" "$pt_tmp"
+        CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.MergeSiteConfig "$test_class" "$pt_tmp"
         put_config_file "$pt_tmp/accumulo.properties"
         put_server_code "$at_home/target/accumulo-testing-$at_version.jar"
         start_accumulo
         get_config_file accumulo-client.properties "$pt_tmp"
-        CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config"  ${perf_pkg}.PerfTestRunner "$pt_tmp/accumulo-client.properties" "$test_class" "$(get_version 'ACCUMULO')" "$2"
+        CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.PerfTestRunner "$pt_tmp/accumulo-client.properties" "$test_class" "$(get_version 'ACCUMULO')" "$2"
       fi
     done
     stop_cluster
     ;;
   compare)
-    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config"  ${perf_pkg}.Compare "$2" "$3"
+    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.Compare "$2" "$3"
     ;;
   csv)
-    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config"  ${perf_pkg}.Csv "${@:2}"
+    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.Csv "${@:2}"
     ;;
   list)
-    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config"  ${perf_pkg}.ListTests
+    CLASSPATH="$CP" java $JAVA_OPTS -Dlog4j.configurationFile="file:$log4j_config" ${perf_pkg}.ListTests
     ;;
   *)
     echo "Unknown command : $1"
     print_usage
     exit 1
+    ;;
 esac
-
diff --git a/bin/rwalk b/bin/rwalk
index 4832c0c..b3302cb 100755
--- a/bin/rwalk
+++ b/bin/rwalk
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-at_home=$( cd "$( dirname "$bin_dir" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+at_home=$(cd "$(dirname "$bin_dir")" && pwd)
 source "${bin_dir}/build"
 
 function print_usage() {
diff --git a/conf/env.sh b/conf/env.sh
index 7d5dcd1..d2d8d67 100644
--- a/conf/env.sh
+++ b/conf/env.sh
@@ -48,7 +48,7 @@ fi
 
 # Configuration
 # =============
-conf_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+conf_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
 export TEST_PROPS="${conf_dir}/accumulo-testing.properties"
 if [ ! -f "$TEST_PROPS" ]; then
   echo "Please create and edit accumulo-testing.properties in $conf_dir"
@@ -63,13 +63,15 @@ fi
 # Shaded test jar
 # ===============
 # Versions set below will be what is included in the shaded jar
-ACCUMULO_VERSION="$("$ACCUMULO_HOME"/bin/accumulo version | grep -v 'DEBUG')"; export ACCUMULO_VERSION
-HADOOP_VERSION="$(hadoop version | head -n1 | awk '{print $2}')"; export HADOOP_VERSION
+ACCUMULO_VERSION="$("$ACCUMULO_HOME"/bin/accumulo version | grep -v 'DEBUG')"
+export ACCUMULO_VERSION
+HADOOP_VERSION="$(hadoop version | head -n1 | awk '{print $2}')"
+export HADOOP_VERSION
 export ZOOKEEPER_VERSION=3.8.0
 # Path to shaded test jar
-at_home=$( cd "$( dirname "$conf_dir" )" && pwd )
+at_home=$(cd "$(dirname "$conf_dir")" && pwd)
 export TEST_JAR_PATH="${at_home}/target/accumulo-testing-shaded.jar"
-if [[ ! -f "$TEST_JAR_PATH" ]]; then
+if [[ ! -f $TEST_JAR_PATH ]]; then
   echo "Building $TEST_JAR_PATH"
   cd "${at_home}" || exit 1
   mvn clean package -P create-shade-jar -D skipTests -D accumulo.version="$ACCUMULO_VERSION" -D hadoop.version="$HADOOP_VERSION" -D zookeeper.version="$ZOOKEEPER_VERSION"
@@ -79,14 +81,15 @@ fi
 # ===========
 export HADOOP_API_JAR="${at_home}"/target/dependency/hadoop-client-api.jar
 export HADOOP_RUNTIME_JAR="${at_home}"/target/dependency/hadoop-client-runtime.jar
-if [[ ! -f "$HADOOP_API_JAR" || ! -f "$HADOOP_RUNTIME_JAR" ]]; then
+if [[ ! -f $HADOOP_API_JAR || ! -f $HADOOP_RUNTIME_JAR ]]; then
   mvn dependency:copy-dependencies -Dmdep.stripVersion=true -DincludeArtifactIds=hadoop-client-api,hadoop-client-runtime -Dhadoop.version="$HADOOP_VERSION"
 fi
 
 # Agitator
 # ========
 # Accumulo user
-AGTR_ACCUMULO_USER=$(whoami); export AGTR_ACCUMULO_USER
+AGTR_ACCUMULO_USER=$(whoami)
+export AGTR_ACCUMULO_USER
 # Time (in minutes) between killing Accumulo managers
 export AGTR_MANAGER_KILL_SLEEP_TIME=60
 export AGTR_MANAGER_RESTART_SLEEP_TIME=2
@@ -104,9 +107,11 @@ export AGTR_DATANODE_RESTART_SLEEP_TIME=10
 export AGTR_DATANODE_MIN_KILL=1
 export AGTR_DATANODE_MAX_KILL=1
 # HDFS agitation
-AGTR_HDFS_USER=$(whoami); export AGTR_HDFS_USER
+AGTR_HDFS_USER=$(whoami)
+export AGTR_HDFS_USER
 export AGTR_HDFS=false
 export AGTR_HDFS_SLEEP_TIME=10
 export AGTR_HDFS_SUPERUSER=hdfs
 export AGTR_HDFS_COMMAND="${HADOOP_HOME}/bin/hdfs"
-AGTR_HDFS_SUDO=$(command -v sudo); export AGTR_HDFS_SUDO
+AGTR_HDFS_SUDO=$(command -v sudo)
+export AGTR_HDFS_SUDO
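
A note on the split declarations in conf/env.sh: combining export with a command substitution, as in `export VAR=$(cmd)`, returns the exit status of export itself and masks a failure of cmd (ShellCheck's SC2155). Assigning first and exporting on its own line, as the formatted file now does, keeps the command's status observable:

  export V="$(false)"; echo $?  # prints 0: the status of export
  V="$(false)"; echo $?         # prints 1: the status of false
  export V
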
diff --git a/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/azure-format-lvm-data-disk.sh b/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/azure-format-lvm-data-disk.sh
index 4a346c9..a901af0 100644
--- a/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/azure-format-lvm-data-disk.sh
+++ b/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/azure-format-lvm-data-disk.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -18,15 +18,18 @@
 # under the License.
 #
 
-[ $# -eq 3 ] || { echo "usage: $0 disk_count mount_point user.group"; exit 1; }
+[ $# -eq 3 ] || {
+  echo "usage: $0 disk_count mount_point user.group"
+  exit 1
+}
 
 diskCount=$1
 mountPoint=$2
 owner=$3
 
 until [[ $(ls -1 /dev/disk/azure/scsi1/ | wc -l) == "$diskCount" ]]; do
-	echo "Waiting for $diskCount disks to be attached..."
-	sleep 10
+  echo "Waiting for $diskCount disks to be attached..."
+  sleep 10
 done
 
 VG_GROUP_NAME=storage_vg
@@ -35,20 +38,18 @@ LG_GROUP_NAME=storage_lv
 DISK_PATH="/dev/disk/azure/scsi1"
 declare -a REAL_PATH_ARR
 
-for i in $(ls ${DISK_PATH} 2>/dev/null);
-do
-	REAL_PATH=`realpath ${DISK_PATH}/${i}|tr '\n' ' ' `
-	REAL_PATH_ARR+=($REAL_PATH)
-done;
-
+for i in $(ls ${DISK_PATH} 2>/dev/null); do
+  REAL_PATH=$(realpath ${DISK_PATH}/${i} | tr '\n' ' ')
+  REAL_PATH_ARR+=($REAL_PATH)
+done
 
-RAID_DEVICE_LIST=`echo "${REAL_PATH_ARR[@]}"|sort`
-RAID_DEVICES_COUNT=`echo "${#REAL_PATH_ARR[@]}"`
+RAID_DEVICE_LIST=$(echo "${REAL_PATH_ARR[@]}" | sort)
+RAID_DEVICES_COUNT=$(echo "${#REAL_PATH_ARR[@]}")
 pvcreate ${RAID_DEVICE_LIST}
 vgcreate -s 4M ${VG_GROUP_NAME} ${RAID_DEVICE_LIST}
 lvcreate -n $LG_GROUP_NAME -l 100%FREE -i ${RAID_DEVICES_COUNT} ${VG_GROUP_NAME}
 mkfs.xfs -K -f /dev/${VG_GROUP_NAME}/${LG_GROUP_NAME}
 mkdir -p ${mountPoint}
-printf "/dev/${VG_GROUP_NAME}/${LG_GROUP_NAME}\t${mountPoint}\tauto\tdefaults,noatime\t0\t2\n" >> /etc/fstab
+printf "/dev/${VG_GROUP_NAME}/${LG_GROUP_NAME}\t${mountPoint}\tauto\tdefaults,noatime\t0\t2\n" >>/etc/fstab
 mount --target ${mountPoint}
 chown ${owner} ${mountPoint}
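
The same file also converts legacy backtick command substitution to $(...). The two are equivalent, but $(...) nests without escaping and is easier to read; a small illustration:

  # backticks need escaped nesting:  outer=`basename \`dirname /a/b/c\``
  outer=$(basename "$(dirname /a/b/c)")
  echo "$outer"  # prints: b
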
diff --git a/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/update-hosts-genders.sh b/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/update-hosts-genders.sh
index df405ec..b9389f3 100644
--- a/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/update-hosts-genders.sh
+++ b/contrib/terraform-testing-infrastructure/modules/cloud-init-config/files/update-hosts-genders.sh
@@ -21,7 +21,7 @@
 set -eo pipefail
 
 if [ $# -ne 2 ]; then
-  echo  "usage: $0 additional_hosts_file additional_genders_file" >&2
+  echo "usage: $0 additional_hosts_file additional_genders_file" >&2
   exit 1
 fi
 
@@ -36,7 +36,7 @@ end_genders_marker="##### END GENERATED GENDERS #####"
 # Update the hosts file locally
 # Wrap the supplied host additions with markers that we'll use to strip it back out.
 TMPHOSTS=/tmp/hosts$$
-cat > $TMPHOSTS <<EOF
+cat >$TMPHOSTS <<EOF
 $begin_hosts_marker
 ##### DO NOT EDIT THIS SECTION #####
 $(<"$HOSTS_ADDITIONS")
@@ -44,17 +44,17 @@ $end_hosts_marker
 EOF
 # Strip out any previously applied hosts additions, and then tack the new ones onto the end of /etc/hosts.
 sudo sed -ri '/^'"$begin_hosts_marker"'$/,/^'"$end_hosts_marker"'$/d' /etc/hosts
-cat "$TMPHOSTS" | sudo tee -a /etc/hosts > /dev/null
+cat "$TMPHOSTS" | sudo tee -a /etc/hosts >/dev/null
 
 # Update the genders file locally
 TMPGENDERS=/tmp/genders$$
-cat > $TMPGENDERS <<EOF
+cat >$TMPGENDERS <<EOF
 $begin_genders_marker
 $(<"$GENDERS_ADDITIONS")
 $end_genders_marker
 EOF
 [[ -f /etc/genders ]] && sudo sed -ri '/^'"$begin_genders_marker"'$/,/^'"$end_genders_marker"'$/d' /etc/genders
-cat "$TMPGENDERS" | sudo tee -a /etc/genders > /dev/null
+cat "$TMPGENDERS" | sudo tee -a /etc/genders >/dev/null
 echo "Check genders file validity..."
 nodeattr -k
 
diff --git a/src/main/docker/docker-entry b/src/main/docker/docker-entry
index 6e92875..ddcfd4f 100755
--- a/src/main/docker/docker-entry
+++ b/src/main/docker/docker-entry
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-bin_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
-at_home=$( cd "$( dirname "$bin_dir" )" && pwd )
+bin_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+at_home=$(cd "$(dirname "$bin_dir")" && pwd)
 
 function print_usage() {
   cat <<EOF
@@ -47,10 +47,10 @@ if [ -z "$HADOOP_HOME" ]; then
 fi
 
 case "$1" in
-  cingest|rwalk|gcs|monitor)
+  cingest | rwalk | gcs | monitor)
     "${at_home}"/bin/"$1" "${@:2}"
     ;;
-  -h|help)
+  -h | help)
     print_usage
     exit 1
     ;;
diff --git a/test/automation/automateEC2.sh b/test/automation/automateEC2.sh
index 146415a..a5d6162 100755
--- a/test/automation/automateEC2.sh
+++ b/test/automation/automateEC2.sh
@@ -18,68 +18,64 @@
 # under the License.
 #
 
-
 # reads all git repositories and properties from cluster_props.sh
 source cluster_props.sh
 while true; do
 
-	# check for null properties
-	[ -z "$ACCUMULO_REPO" ] && echo "Check accumulo in cluster_props.sh" && break
-	[ -z "$ACCUMULO_BRANCH" ] && echo "Check accumulo branch in cluster_props.sh" && break
-	[ -z "$ACCUMULO_TESTING_REPO" ] && echo "Check accumulo-testing in cluster_props.sh" && break
-	[ -z "$ACCUMULO_TESTING_BRANCH" ] && echo "Check accumulo-testing branch in cluster_props.sh" && break
-	[ -z "$FLUO_MUCHOS_REPO" ] && echo "Check fluo-muchos in cluster_props.sh" && break
-	[ -z "$FLUO_MUCHOS_BRANCH" ] && echo "Check fluo-muchos branch in cluster_props.sh" && break
-	[[ -z "$MUCHOS_PROPS" || ! -f "$MUCHOS_PROPS" ]] && echo "Check muchos.props in cluster_props.sh" && break
-
-
+  # check for null properties
+  [ -z "$ACCUMULO_REPO" ] && echo "Check accumulo in cluster_props.sh" && break
+  [ -z "$ACCUMULO_BRANCH" ] && echo "Check accumulo branch in cluster_props.sh" && break
+  [ -z "$ACCUMULO_TESTING_REPO" ] && echo "Check accumulo-testing in cluster_props.sh" && break
+  [ -z "$ACCUMULO_TESTING_BRANCH" ] && echo "Check accumulo-testing branch in cluster_props.sh" && break
+  [ -z "$FLUO_MUCHOS_REPO" ] && echo "Check fluo-muchos in cluster_props.sh" && break
+  [ -z "$FLUO_MUCHOS_BRANCH" ] && echo "Check fluo-muchos branch in cluster_props.sh" && break
+  [[ -z $MUCHOS_PROPS || ! -f $MUCHOS_PROPS ]] && echo "Check muchos.props in cluster_props.sh" && break
 
-	# builds Accumulo tarball and installs fluo-muchos in a temporary directory
-	TMPDIR=`mktemp -d`
-	echo "Directory created: $TMPDIR " && cd $TMPDIR
-	git clone --single-branch --branch $FLUO_MUCHOS_BRANCH $FLUO_MUCHOS_REPO 
-	git clone --single-branch --branch $ACCUMULO_BRANCH $ACCUMULO_REPO && cd accumulo
-	mvn clean package -DskipFormat -PskipQA
+  # builds Accumulo tarball and installs fluo-muchos in a temporary directory
+  TMPDIR=$(mktemp -d)
+  echo "Directory created: $TMPDIR " && cd $TMPDIR
+  git clone --single-branch --branch $FLUO_MUCHOS_BRANCH $FLUO_MUCHOS_REPO
+  git clone --single-branch --branch $ACCUMULO_BRANCH $ACCUMULO_REPO && cd accumulo
+  mvn clean package -DskipFormat -PskipQA
 
-	# copies the tarball to the given muchos directory
-	cp ./assemble/target/*.gz $TMPDIR/fluo-muchos/conf/upload/
-	if [ $? -eq 0 ]; then
-		echo "Accumulo tarball copied to fluo-muchos"
-	else
-		break
-	fi
+  # copies the tarball to the given muchos directory
+  cp ./assemble/target/*.gz $TMPDIR/fluo-muchos/conf/upload/
+  if [ $? -eq 0 ]; then
+    echo "Accumulo tarball copied to fluo-muchos"
+  else
+    break
+  fi
 
-	# sets up the cluster
-	cd $TMPDIR/fluo-muchos || (echo "Could not find Fluo-Muchos" && break)
-	cp conf/muchos.props.example conf/muchos.props
-	cp $MUCHOS_PROPS ./conf/muchos.props || (echo Could not use custom config. Check path in cluster_props.sh)
+  # sets up the cluster
+  cd $TMPDIR/fluo-muchos || (echo "Could not find Fluo-Muchos" && break)
+  cp conf/muchos.props.example conf/muchos.props
+  cp $MUCHOS_PROPS ./conf/muchos.props || (echo Could not use custom config. Check path in cluster_props.sh)
 
-	./bin/muchos launch -c "$USER-cluster" && echo "Setting up cluster.."
-	# repeat setup until all nodes are initialized
-	until ./bin/muchos setup; do
-		echo "Script will resume once nodes are initialized."
-		echo "Retrying in 20 seconds..."
-		sleep 20
-	done
+  ./bin/muchos launch -c "$USER-cluster" && echo "Setting up cluster.."
+  # repeat setup until all nodes are initialized
+  until ./bin/muchos setup; do
+    echo "Script will resume once nodes are initialized."
+    echo "Retrying in 20 seconds..."
+    sleep 20
+  done
 
+  if [ $? -eq 0 ]; then
+    echo "EC2 cluster setup as $USER-cluster"
+  else
+    echo "Terminating cluster"
+    ./bin/muchos terminate -c $USER-cluster
+    break
+  fi
 
-	if [ $? -eq 0 ]; then
-                echo "EC2 cluster setup as $USER-cluster"
-        else
-		echo "Terminating cluster"
-		./bin/muchos terminate -c $USER-cluster
-                break
-        fi
+  CLUSTERUSER=$(./bin/muchos config -p cluster_user)
+  PROXYIP=$(./bin/muchos config -p proxy.public.ip)
+  M2='/home/centos/install/apache-maven*/bin'
 
-	CLUSTERUSER=`./bin/muchos config -p cluster_user`
-	PROXYIP=`./bin/muchos config -p proxy.public.ip`
-	M2='/home/centos/install/apache-maven*/bin'
-	
-	# clones and builds accumulo and accumulo-testing on EC2
-	ssh $CLUSTERUSER@$PROXYIP "git clone --single-branch --branch $ACCUMULO_BRANCH $ACCUMULO_REPO"
-	ssh $CLUSTERUSER@$PROXYIP "cd accumulo && $M2/mvn clean install -PskipQA && cd .."
-	ssh $CLUSTERUSER@$PROXYIP "git clone --single-branch --branch $ACCUMULO_TESTING_BRANCH $ACCUMULO_TESTING_REPO"
-	ssh $CLUSTERUSER@$PROXYIP "cd accumulo-testing && $M2/mvn clean package"
+  # clones and builds accumulo and accumulo-testing on EC2
+  ssh $CLUSTERUSER@$PROXYIP "git clone --single-branch --branch $ACCUMULO_BRANCH $ACCUMULO_REPO"
+  ssh $CLUSTERUSER@$PROXYIP "cd accumulo && $M2/mvn clean install -PskipQA && cd .."
+  ssh $CLUSTERUSER@$PROXYIP "git clone --single-branch --branch $ACCUMULO_TESTING_BRANCH $ACCUMULO_TESTING_REPO"
+  ssh $CLUSTERUSER@$PROXYIP "cd accumulo-testing && $M2/mvn clean package"
 
-	break
+  break
 done
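
One thing the reformat leaves untouched in automateEC2.sh: the temporary directory from `mktemp -d` is never removed. bin/agitator, earlier in this commit, shows the usual remedy, an EXIT trap. A minimal sketch of that cleanup pattern, independent of this script:

  tmpdir=$(mktemp -d) || exit 1
  trap 'rm -rf "$tmpdir"' EXIT  # runs on any exit path, success or failure
  echo "working in $tmpdir"
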
diff --git a/test/automation/cluster_props.sh b/test/automation/cluster_props.sh
index 51330db..a04794d 100644
--- a/test/automation/cluster_props.sh
+++ b/test/automation/cluster_props.sh
@@ -28,4 +28,3 @@ export FLUO_MUCHOS_REPO=https://github.com/apache/fluo-muchos
 export FLUO_MUCHOS_BRANCH=main
 
 export MUCHOS_PROPS=${MUCHOS_PROPS:-/path/to/muchos/props}
-
diff --git a/test/merkle-replication/configure-replication.sh b/test/merkle-replication/configure-replication.sh
index b6bf607..800feb8 100755
--- a/test/merkle-replication/configure-replication.sh
+++ b/test/merkle-replication/configure-replication.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -20,13 +20,13 @@
 
 # Start: Resolve Script Directory
 SOURCE="${BASH_SOURCE[0]}"
-while [[ -h "${SOURCE}" ]]; do # resolve $SOURCE until the file is no longer a symlink
-   dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+while [[ -L ${SOURCE} ]]; do # resolve $SOURCE until the file is no longer a symlink
+  dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-script=$( basename "${SOURCE}" )
+dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+script=$(basename "${SOURCE}")
 # Stop: Resolve Script Directory
 
 # Guess at ACCUMULO_HOME and ACCUMULO_CONF_DIR if not already defined
@@ -42,52 +42,52 @@ source_commands="${tmpdir}/source_commands.txt"
 
 echo 'Removing old tables and setting replication name on source'
 
-echo "deletetable -f $SOURCE_TABLE_NAME" >> $source_commands
-echo "createtable $SOURCE_TABLE_NAME" >> $source_commands
-echo "config -s replication.name=source" >> $source_commands
-echo "quit" >> $source_commands
+echo "deletetable -f $SOURCE_TABLE_NAME" >>$source_commands
+echo "createtable $SOURCE_TABLE_NAME" >>$source_commands
+echo "config -s replication.name=source" >>$source_commands
+echo "quit" >>$source_commands
 
 # Source: drop and create tables, configure unique name for replication and grant perms
 echo $SOURCE_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $SOURCE_ACCUMULO_USER -z \
-    $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $source_commands
+  $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $source_commands
 
 destination_commands="${tmpdir}/destination_commands.txt"
 
 echo 'Removing old tables and setting replication name on destination'
 
-echo "deletetable -f $DESTINATION_TABLE_NAME" >> $destination_commands
-echo "createtable $DESTINATION_TABLE_NAME" >> $destination_commands
-echo "config -s replication.name=destination" >> $destination_commands
-echo "quit" >> $destination_commands
+echo "deletetable -f $DESTINATION_TABLE_NAME" >>$destination_commands
+echo "createtable $DESTINATION_TABLE_NAME" >>$destination_commands
+echo "config -s replication.name=destination" >>$destination_commands
+echo "quit" >>$destination_commands
 
 # Destination: drop and create tables, configure unique name for replication and grant perms
 echo $DESTINATION_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $DESTINATION_ACCUMULO_USER -z \
-    $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -f $destination_commands
+  $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -f $destination_commands
 
 rm $source_commands
 rm $destination_commands
 
 table_id=$(echo $DESTINATION_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $DESTINATION_ACCUMULO_USER -z \
-    $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -e 'tables -l' | grep "${DESTINATION_TABLE_NAME}" \
-    | grep -v "${DESTINATION_MERKLE_TABLE_NAME}" | awk '{print $3}')
+  $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -e 'tables -l' | grep "${DESTINATION_TABLE_NAME}" |
+  grep -v "${DESTINATION_MERKLE_TABLE_NAME}" | awk '{print $3}')
 
 echo "Configuring $SOURCE_TABLE_NAME to replicate to $DESTINATION_TABLE_NAME (id=$table_id)"
 
 # Define our peer 'destination' with the ReplicaSystem impl, instance name and ZKs
-echo "config -s replication.peer.destination=org.apache.accumulo.tserver.replication.AccumuloReplicaSystem,$DESTINATION_INSTANCE,$DESTINATION_ZOOKEEPERS" >> $source_commands
+echo "config -s replication.peer.destination=org.apache.accumulo.tserver.replication.AccumuloReplicaSystem,$DESTINATION_INSTANCE,$DESTINATION_ZOOKEEPERS" >>$source_commands
 # Username for 'destination'
-echo "config -s replication.peer.user.destination=$DESTINATION_ACCUMULO_USER" >> $source_commands
+echo "config -s replication.peer.user.destination=$DESTINATION_ACCUMULO_USER" >>$source_commands
 # Password for 'destination'
-echo "config -s replication.peer.password.destination=$DESTINATION_ACCUMULO_PASSWORD" >> $source_commands
+echo "config -s replication.peer.password.destination=$DESTINATION_ACCUMULO_PASSWORD" >>$source_commands
 # Configure replication to 'destination' for $SOURCE_TABLE_NAME
-echo "config -t $SOURCE_TABLE_NAME -s table.replication.target.destination=$table_id" >> $source_commands
+echo "config -t $SOURCE_TABLE_NAME -s table.replication.target.destination=$table_id" >>$source_commands
 # Enable replication for the table
-echo "config -t $SOURCE_TABLE_NAME -s table.replication=true" >> $source_commands
-echo "quit" >> $source_commands
+echo "config -t $SOURCE_TABLE_NAME -s table.replication=true" >>$source_commands
+echo "quit" >>$source_commands
 
 # Configure replication from source to destination and then enable it
 echo $SOURCE_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $SOURCE_ACCUMULO_USER -z \
-    $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $source_commands
+  $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $source_commands
 
 rm $source_commands
 
@@ -95,8 +95,7 @@ rm $source_commands
 echo 'Adding splits...'
 
 echo $SOURCE_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $SOURCE_ACCUMULO_USER -z \
-    $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -e "addsplits -t $SOURCE_TABLE_NAME 1 2 3 4 5 6 7 8 9"
+  $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -e "addsplits -t $SOURCE_TABLE_NAME 1 2 3 4 5 6 7 8 9"
 
 echo $DESTINATION_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $DESTINATION_ACCUMULO_USER -z \
-    $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -e "addsplits -t $DESTINATION_TABLE_NAME 1 2 3 4 5 6 7 8 9"
-
+  $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -e "addsplits -t $DESTINATION_TABLE_NAME 1 2 3 4 5 6 7 8 9"
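
In the resolve-script-directory loop above, `[[ -h ]]` became `[[ -L ]]` (the two symlink tests are synonyms; -L is the more common spelling), and the -s simplification dropped the quotes inside [[ ]], where no word splitting occurs. Where GNU coreutils realpath is available, the whole loop can collapse to one line; this is sketched as an alternative, not what the commit does:

  dir=$(dirname "$(realpath "${BASH_SOURCE[0]}")")
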
diff --git a/test/merkle-replication/ingest-data.sh b/test/merkle-replication/ingest-data.sh
index 72203b2..485aa96 100755
--- a/test/merkle-replication/ingest-data.sh
+++ b/test/merkle-replication/ingest-data.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -20,13 +20,13 @@
 
 # Start: Resolve Script Directory
 SOURCE="${BASH_SOURCE[0]}"
-while [[ -h "${SOURCE}" ]]; do # resolve $SOURCE until the file is no longer a symlink
-   dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+while [[ -L ${SOURCE} ]]; do # resolve $SOURCE until the file is no longer a symlink
+  dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-script=$( basename "${SOURCE}" )
+dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+script=$(basename "${SOURCE}")
 # Stop: Resolve Script Directory
 
 # Guess at ACCUMULO_HOME and ACCUMULO_CONF_DIR if not already defined
@@ -38,5 +38,5 @@ ACCUMULO_CONF_DIR=${ACCUMULO_CONF_DIR:-"$ACCUMULO_HOME/conf"}
 
 # Ingest data into the source table
 $ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.replication.merkle.ingest.RandomWorkload --table $SOURCE_TABLE_NAME \
-    -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER -p $SOURCE_ACCUMULO_PASSWORD -d $DELETE_PERCENT \
-    -cf $MAX_CF -cq $MAX_CQ -r $MAX_ROW -n $NUM_RECORDS
+  -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER -p $SOURCE_ACCUMULO_PASSWORD -d $DELETE_PERCENT \
+  -cf $MAX_CF -cq $MAX_CQ -r $MAX_ROW -n $NUM_RECORDS
diff --git a/test/merkle-replication/merkle-env.sh b/test/merkle-replication/merkle-env.sh
index 6db6921..3408829 100755
--- a/test/merkle-replication/merkle-env.sh
+++ b/test/merkle-replication/merkle-env.sh
@@ -1,3 +1,4 @@
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
diff --git a/test/merkle-replication/verify-data.sh b/test/merkle-replication/verify-data.sh
index 9f554c4..ac8388c 100755
--- a/test/merkle-replication/verify-data.sh
+++ b/test/merkle-replication/verify-data.sh
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -20,13 +20,13 @@
 
 # Start: Resolve Script Directory
 SOURCE="${BASH_SOURCE[0]}"
-while [[ -h "${SOURCE}" ]]; do # resolve $SOURCE until the file is no longer a symlink
-   dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+while [[ -L ${SOURCE} ]]; do # resolve $SOURCE until the file is no longer a symlink
+  dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${dir}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-dir=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-script=$( basename "${SOURCE}" )
+dir=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+script=$(basename "${SOURCE}")
 # Stop: Resolve Script Directory
 
 # Guess at ACCUMULO_HOME and ACCUMULO_CONF_DIR if not already defined
@@ -40,55 +40,55 @@ tmpdir=$(mktemp -dt "$0.XXXXXXXXXX")
 
 splits=${tmpdir}/splits
 
-echo 1 >> $splits
-echo 2 >> $splits
-echo 3 >> $splits
-echo 4 >> $splits
-echo 5 >> $splits
-echo 6 >> $splits
-echo 7 >> $splits
-echo 8 >> $splits
-echo 9 >> $splits
+echo 1 >>$splits
+echo 2 >>$splits
+echo 3 >>$splits
+echo 4 >>$splits
+echo 5 >>$splits
+echo 6 >>$splits
+echo 7 >>$splits
+echo 8 >>$splits
+echo 9 >>$splits
 
 commands=${tmpdir}/commands
 
 # Generate leaves of merkle trees for source
-echo "deletetable -f $SOURCE_MERKLE_TABLE_NAME" >> $commands
-echo "createtable $SOURCE_MERKLE_TABLE_NAME" >> $commands
-echo "quit" >> $commands
+echo "deletetable -f $SOURCE_MERKLE_TABLE_NAME" >>$commands
+echo "createtable $SOURCE_MERKLE_TABLE_NAME" >>$commands
+echo "quit" >>$commands
 
 echo $SOURCE_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $SOURCE_ACCUMULO_USER -z \
-    $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $commands
+  $SOURCE_INSTANCE $SOURCE_ZOOKEEPERS -f $commands
 
 echo -e "\nGenerating merkle tree hashes for $SOURCE_TABLE_NAME"
 
 $ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.replication.merkle.cli.GenerateHashes -t $SOURCE_TABLE_NAME \
-    -o $SOURCE_MERKLE_TABLE_NAME -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER \
-    -p $SOURCE_ACCUMULO_PASSWORD -nt 8 -hash MD5 --splits $splits
+  -o $SOURCE_MERKLE_TABLE_NAME -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER \
+  -p $SOURCE_ACCUMULO_PASSWORD -nt 8 -hash MD5 --splits $splits
 
 rm $commands
 
 # Generate leaves of merkle trees for destination
-echo "deletetable -f $DESTINATION_MERKLE_TABLE_NAME" >> $commands
-echo "createtable $DESTINATION_MERKLE_TABLE_NAME" >> $commands
-echo "quit" >> $commands
+echo "deletetable -f $DESTINATION_MERKLE_TABLE_NAME" >>$commands
+echo "createtable $DESTINATION_MERKLE_TABLE_NAME" >>$commands
+echo "quit" >>$commands
 
 echo $DESTINATION_ACCUMULO_PASSWORD | ${ACCUMULO_HOME}/bin/accumulo shell -u $DESTINATION_ACCUMULO_USER -z \
-    $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -f $commands
+  $DESTINATION_INSTANCE $DESTINATION_ZOOKEEPERS -f $commands
 
 echo -e "\nGenerating merkle tree hashes for $DESTINATION_TABLE_NAME"
 
 $ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.replication.merkle.cli.GenerateHashes -t $DESTINATION_TABLE_NAME \
-    -o $DESTINATION_MERKLE_TABLE_NAME -i $DESTINATION_INSTANCE -z $DESTINATION_ZOOKEEPERS -u $DESTINATION_ACCUMULO_USER \
-    -p $DESTINATION_ACCUMULO_PASSWORD -nt 8 -hash MD5 --splits $splits
+  -o $DESTINATION_MERKLE_TABLE_NAME -i $DESTINATION_INSTANCE -z $DESTINATION_ZOOKEEPERS -u $DESTINATION_ACCUMULO_USER \
+  -p $DESTINATION_ACCUMULO_PASSWORD -nt 8 -hash MD5 --splits $splits
 
 echo -e "\nComputing root hash:"
 
 #Compute root node of merkle tree
 $ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.replication.merkle.cli.ComputeRootHash -t $SOURCE_MERKLE_TABLE_NAME \
-    -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER -p $SOURCE_ACCUMULO_PASSWORD -hash MD5
+  -i $SOURCE_INSTANCE -z $SOURCE_ZOOKEEPERS -u $SOURCE_ACCUMULO_USER -p $SOURCE_ACCUMULO_PASSWORD -hash MD5
 
 $ACCUMULO_HOME/bin/accumulo org.apache.accumulo.test.replication.merkle.cli.ComputeRootHash -t $DESTINATION_MERKLE_TABLE_NAME \
-    -i $DESTINATION_INSTANCE -z $DESTINATION_ZOOKEEPERS -u $DESTINATION_ACCUMULO_USER -p $DESTINATION_ACCUMULO_PASSWORD -hash MD5
+  -i $DESTINATION_INSTANCE -z $DESTINATION_ZOOKEEPERS -u $DESTINATION_ACCUMULO_USER -p $DESTINATION_ACCUMULO_PASSWORD -hash MD5
 
 rm -rf $tmpdir
diff --git a/test/stress/reader.sh b/test/stress/reader.sh
index e5b79dc..ac96237 100755
--- a/test/stress/reader.sh
+++ b/test/stress/reader.sh
@@ -22,11 +22,11 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
+DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
 # Stop: Resolve Script Directory
 LOG_DIR=${DIR}/logs
 mkdir -p "$LOG_DIR"
@@ -38,5 +38,5 @@ ts=$(date +%Y%m%d%H%M%S)
 host=$(hostname)
 # We want USERPASS to word split
 "${ACCUMULO_HOME}/bin/accumulo org.apache.accumulo.test.stress.random.Scan" "$INSTANCE" $USERPASS "$SCAN_SEED" "$CONTINUOUS_SCAN" "$SCAN_BATCH_SIZE" \
-    > "$LOG_DIR/${ts}_${host}_reader.out" \
-    2> "$LOG_DIR/${ts}_${host}_reader.err"
+  >"$LOG_DIR/${ts}_${host}_reader.out" \
+  2>"$LOG_DIR/${ts}_${host}_reader.err"
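
The redirection churn in reader.sh is another shfmt default: no space between a redirection operator and its target, so `> "$log"` becomes `>"$log"` and `2> "$err"` becomes `2>"$err"`. Both spellings are identical to bash:

  date >out.log 2>err.log    # shfmt's preferred spacing
  date > out.log 2> err.log  # equivalent; rewritten to the form above
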
diff --git a/test/stress/start-readers.sh b/test/stress/start-readers.sh
index 8405c48..24949b4 100755
--- a/test/stress/start-readers.sh
+++ b/test/stress/start-readers.sh
@@ -22,19 +22,19 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR="$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
-   SOURCE="$(readlink "${SOURCE}")"
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR="$(cd -P "$(dirname "${SOURCE}")" && pwd)"
+  SOURCE="$(readlink "${SOURCE}")"
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR="$( cd -P "$( dirname "${SOURCE}" )" && pwd )"
+DIR="$(cd -P "$(dirname "${SOURCE}")" && pwd)"
 # Stop: Resolve Script Directory
 
 # Source environment
 . "${DIR}/stress-env.sh"
 
 if [[ ! -f ${DIR}/readers ]]; then
-    echo readers file is missing
-    exit 1
+  echo readers file is missing
+  exit 1
 fi
 
 # Copy environment out
diff --git a/test/stress/start-writers.sh b/test/stress/start-writers.sh
index d41dd02..0e8d41f 100755
--- a/test/stress/start-writers.sh
+++ b/test/stress/start-writers.sh
@@ -22,19 +22,19 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
+DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
 # Stop: Resolve Script Directory
 
 # Source environment
 . "${DIR}/stress-env.sh"
 
 if [[ ! -f ${DIR}/writers ]]; then
-    echo writers file is missing
-    exit 1
+  echo writers file is missing
+  exit 1
 fi
 
 # Copy environment out
diff --git a/test/stress/stop-readers.sh b/test/stress/stop-readers.sh
index fc39cb7..37d4ab9 100755
--- a/test/stress/stop-readers.sh
+++ b/test/stress/stop-readers.sh
@@ -22,18 +22,18 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
+DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
 # Stop: Resolve Script Directory
 
 # Source environment
 . "${DIR}/stress-env.sh"
 
 if [[ ! -f "${DIR}/readers" ]]; then
-    echo readers file is missing
-    exit 1
+  echo readers file is missing
+  exit 1
 fi
-$PSSH -h "${DIR}/readers" "pkill -f '[o]rg.apache.accumulo.test.stress.random.Scan'" < /dev/null
+$PSSH -h "${DIR}/readers" "pkill -f '[o]rg.apache.accumulo.test.stress.random.Scan'" </dev/null
diff --git a/test/stress/stop-writers.sh b/test/stress/stop-writers.sh
index e7038e4..067ceba 100755
--- a/test/stress/stop-writers.sh
+++ b/test/stress/stop-writers.sh
@@ -22,18 +22,18 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
+DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
 # Stop: Resolve Script Directory
 
 # Source environment
 . "${DIR}/stress-env.sh"
 
 if [[ ! -f "${DIR}/writers" ]]; then
-    echo writers file is missing
-    exit 1
+  echo writers file is missing
+  exit 1
 fi
-$PSSH -h "${DIR}/writers" "pkill -f '[o]rg.apache.accumulo.test.stress.random.Write'" < /dev/null
+$PSSH -h "${DIR}/writers" "pkill -f '[o]rg.apache.accumulo.test.stress.random.Write'" </dev/null
diff --git a/test/stress/stress-env.sh b/test/stress/stress-env.sh
index e96d8c7..dd89133 100644
--- a/test/stress/stress-env.sh
+++ b/test/stress/stress-env.sh
@@ -1,3 +1,4 @@
+#! /usr/bin/env bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
@@ -28,7 +29,8 @@ elif hash parallel-ssh 2>/dev/null; then
   PSSH=parallel-ssh
   PSCP=parallel-scp
 else
-  echo >&2 "The stress test requires pssh/parallel-ssh and pscp/parallel-scp to be installed. Aborting."; exit 1;
+  echo >&2 "The stress test requires pssh/parallel-ssh and pscp/parallel-scp to be installed. Aborting."
+  exit 1
 fi
 
 # Edit the credentials to match your system
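
stress-env.sh, like bin/agitator, probes for the pssh tools with `hash <name> 2>/dev/null`, which succeeds only when bash can locate the command. The POSIX-portable equivalent, shown here as an alternative rather than what the script uses, is command -v:

  if command -v pssh >/dev/null 2>&1; then
    PSSH=pssh
  fi
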
diff --git a/test/stress/writer.sh b/test/stress/writer.sh
index 8b2f4ce..c028000 100755
--- a/test/stress/writer.sh
+++ b/test/stress/writer.sh
@@ -22,11 +22,11 @@
 # Ref: https://stackoverflow.com/questions/59895/
 SOURCE="${BASH_SOURCE[0]}"
 while [ -h "${SOURCE}" ]; do # resolve $SOURCE until the file is no longer a symlink
-   DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
-   SOURCE=$(readlink "${SOURCE}")
-   [[ "${SOURCE}" != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+  DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
+  SOURCE=$(readlink "${SOURCE}")
+  [[ ${SOURCE} != /* ]] && SOURCE="${DIR}/${SOURCE}" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
 done
-DIR=$( cd -P "$( dirname "${SOURCE}" )" && pwd )
+DIR=$(cd -P "$(dirname "${SOURCE}")" && pwd)
 # Stop: Resolve Script Directory
 LOG_DIR=${DIR}/logs
 mkdir -p "$LOG_DIR"
@@ -43,5 +43,5 @@ host=$(hostname)
 "${ACCUMULO_HOME}/bin/accumulo org.apache.accumulo.test.stress.random.Write" "$INSTANCE" $USERPASS "$ROW_RANGE" "$CF_RANGE" "$CQ_RANGE" "$VALUE_RANGE" \
   "$ROW_SEED" "$CF_SEED" "$CQ_SEED" "$VALUE_SEED" \
   "$ROW_WIDTH" "$ROW_WIDTH_SEED" "$MAX_CELLS_PER_MUTATION" "$WRITE_DELAY" \
-    > "$LOG_DIR/${ts}_${host}_writer.out" \
-    2> "$LOG_DIR/${ts}_${host}_writer.err"
+  >"$LOG_DIR/${ts}_${host}_writer.out" \
+  2>"$LOG_DIR/${ts}_${host}_writer.err"
diff --git a/test/test1/verify_test.sh b/test/test1/verify_test.sh
index 0fe35a0..3a49ba3 100755
--- a/test/test1/verify_test.sh
+++ b/test/test1/verify_test.sh
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1 &
diff --git a/test/test1/verify_test_2.sh b/test/test1/verify_test_2.sh
index febc0d7..a9b566f 100755
--- a/test/test1/verify_test_2.sh
+++ b/test/test1/verify_test_2.sh
@@ -18,8 +18,8 @@
 # under the License.
 #
 
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
-../../../bin/accumulo  org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest -u root -p secret --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
diff --git a/test/test2/concurrent.sh b/test/test2/concurrent.sh
index 587c15b..378fc2f 100755
--- a/test/test2/concurrent.sh
+++ b/test/test2/concurrent.sh
@@ -33,70 +33,68 @@ wait
 
 echo "ingesting second halves (500K to (1M - 1), 1.5M to (2M - 1), etc) and verifying first halves"
 
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 0 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 1000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 2000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 3000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 4000000 --cols 1  &
-
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 1500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 2500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 3500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 4500000 --cols 1  &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 5000000 --start 4000000 --cols 1 &
+
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 1500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 2500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 3500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 5000000 --start 4500000 --cols 1 &
 
 wait
 
 echo "verifying complete range"
 
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1  &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 4000000 --cols 1 &
 
 wait
 
 echo "ingesting first halves (0 to (500K - 1), 1M to (1.5M - 1), etc) w/ new timestamp AND verifying second half w/ old timestamp"
 
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 0 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 1000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 2000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 3000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 4000000 --cols 1  &
-
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 1500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 2500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 3500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 4500000 --cols 1  &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 4000000 --cols 1 &
 
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 1500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 2500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 3500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 500000 --start 4500000 --cols 1 &
 
 wait
 
 echo "ingesting second halves (500K to (1M - 1), 1.5M to (2M - 1), etc) and verifying first halves"
 
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 0 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 1000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 2000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 3000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 4000000 --cols 1  &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 500000 --start 4000000 --cols 1 &
 
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 1500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 2500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 3500000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 4500000 --cols 1  &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 1500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 2500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 3500000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 2 --size 50 --random 57 --rows 500000 --start 4500000 --cols 1 &
 
 wait
 
 echo "verifying complete range"
 
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1  &
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1  &
-
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 0 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 1000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
 
 wait
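
Throughout concurrent.sh the formatter trims the stray second space before
each trailing & and tightens the blank lines; the fork-then-wait structure is
untouched. Each phase backgrounds a batch of ingesters or verifiers and must
join them all before the next phase starts. A minimal sketch of that pattern,
assuming a helper name that is not in the repo:

    # run_batch CLASS TIMESTAMP SEED ROWS STEP
    # Starts five staggered background workers, then blocks on all of them.
    run_batch() {
      local class=$1 ts=$2 seed=$3 rows=$4 step=$5 i
      for i in 0 1 2 3 4; do
        ../../../bin/accumulo "org.apache.accumulo.test.$class" $USERPASS \
          --size 50 --timestamp "$ts" --random "$seed" \
          --rows "$rows" --start $((i * step)) --cols 1 &
      done
      wait # the next phase may only begin once every worker has exited
    }

    run_batch VerifyIngest 2 57 1000000 1000000 # e.g. the final "verifying complete range" phase
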
diff --git a/test/test3/bigrow.sh b/test/test3/bigrow.sh
index 11fa90f..1ae1b67 100755
--- a/test/test3/bigrow.sh
+++ b/test/test3/bigrow.sh
@@ -22,9 +22,9 @@ USERPASS='-u root -p secret'
 ../../../bin/accumulo shell $USERPASS -e 'deletetable -f test_ingest'
 ../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --rows 0 --createTable
 
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56   --rows 1       --start 5000000 --cols 2000000;
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56   --rows 1000000 --start 0       --cols 1;
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 1 --start 5000000 --cols 2000000
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 1000000 --start 0 --cols 1
 #../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 1      --start 5000000 --cols 2000000;
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0       --cols 1;
-../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56   --rows 1000000 --start 7000000 --cols 1;
-../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 7000000 --cols 1;
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 0 --cols 1
+../../../bin/accumulo org.apache.accumulo.test.TestIngest $USERPASS --timestamp 1 --size 50 --random 56 --rows 1000000 --start 7000000 --cols 1
+../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 1 --random 56 --rows 1000000 --start 7000000 --cols 1
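
In bigrow.sh the trailing semicolons and the column-aligned padding both
disappear. Both are purely cosmetic in bash: a semicolon at end of line is
just a redundant command separator, and consecutive spaces between arguments
collapse during word splitting, so every command parses exactly as before.
Illustrated on a toy command:

    echo done;   # trailing semicolon: legal but redundant
    echo done    # equivalent command once the formatter drops it
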
diff --git a/test/test4/bulk_import_test.sh b/test/test4/bulk_import_test.sh
index 65795a5..c6c3e38 100755
--- a/test/test4/bulk_import_test.sh
+++ b/test/test4/bulk_import_test.sh
@@ -72,4 +72,3 @@ echo "creating second set of map files"
 ../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 2000000 --cols 1 &
 ../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 3000000 --cols 1 &
 ../../../bin/accumulo org.apache.accumulo.test.VerifyIngest $USERPASS --size 50 --timestamp 2 --random 57 --rows 1000000 --start 4000000 --cols 1 &
-
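
The bulk_import_test.sh hunk only drops a blank line at end of file, another
purely cosmetic cleanup. For anyone wanting to catch the same problem before
committing, a rough check (illustrative only, not a script from this repo)
might look like:

    # Flag tracked shell scripts whose last line is blank.
    git ls-files '*.sh' | while read -r f; do
      [[ -z $(tail -n 1 "$f") ]] && echo "trailing blank line: $f"
    done
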
diff --git a/test/upgrade/upgrade_test.sh b/test/upgrade/upgrade_test.sh
index 57d3aea..6cede65 100755
--- a/test/upgrade/upgrade_test.sh
+++ b/test/upgrade/upgrade_test.sh
@@ -18,11 +18,10 @@
 # under the License.
 #
 
-
 # This script test upgrading from Accumulo 1.9 to 2.0. This script is
 # not self verifying, its output must be inspected for correctness.
 
-if [[ $# != 1 ]] ; then
+if [[ $# != 1 ]]; then
   BASENAME=$(basename "$0")
   echo "Usage: $BASENAME clean|dirty"
   exit -1
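
In upgrade_test.sh the argument guard changes only in punctuation: the
semicolon moves flush against ]], where bash's command separator
conventionally sits, and the test itself is unchanged. Shown standalone, with
one hedged tweak: exit statuses are 8-bit, so bash reports "exit -1" as 255,
and "exit 1" states the intent more plainly.

    # The reformatted guard clause; "exit 1" substituted for clarity (see above).
    if [[ $# != 1 ]]; then
      echo "Usage: $(basename "$0") clean|dirty"
      exit 1
    fi
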
@@ -51,12 +50,11 @@ cd $UNO_DIR
 
   hadoop fs -ls /accumulo/version
 
-
   hadoop fs -rmr "$BULK"
   hadoop fs -mkdir -p "$BULK/fail"
   accumulo org.apache.accumulo.test.TestIngest -i uno -u root -p secret --rfile $BULK/bulk/test --timestamp 1 --size 50 --random 56 --rows 200000 --start 200000 --cols 1
 
-  accumulo org.apache.accumulo.test.TestIngest -i uno -u root -p secret --timestamp 1 --size 50 --random 56 --rows 200000 --start 0 --cols 1  --createTable --splits 10
+  accumulo org.apache.accumulo.test.TestIngest -i uno -u root -p secret --timestamp 1 --size 50 --random 56 --rows 200000 --start 0 --cols 1 --createTable --splits 10
 
   accumulo shell -u root -p secret <<EOF
    table test_ingest
@@ -72,7 +70,7 @@ EOF
 )
 
 if [[ $1 == dirty ]]; then
-	pkill -9 -f accumulo\\.start
+  pkill -9 -f accumulo\\.start
 else
   (
     eval "$(./bin/uno env)"