You are viewing a plain text version of this content. The canonical link for it is here.
Posted to notifications@fluo.apache.org by GitBox <gi...@apache.org> on 2018/09/05 14:37:07 UTC

[GitHub] mikewalch closed pull request #191: Several updates to Uno

mikewalch closed pull request #191: Several updates to Uno
URL: https://github.com/apache/fluo-uno/pull/191
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (it would not otherwise be visible, since GitHub hides it on merge):

diff --git a/bin/impl/fetch.sh b/bin/impl/fetch.sh
index 7163095..acaf193 100755
--- a/bin/impl/fetch.sh
+++ b/bin/impl/fetch.sh
@@ -16,22 +16,46 @@
 
 source "$UNO_HOME"/bin/impl/util.sh
 
-function download_verify() {
-  url_prefix=$1
-  tarball=$2
-  expected_hash=$3
+function download_other() {
+  local url_prefix=$1
+  local tarball=$2
+  local expected_hash=$3
+
+  wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
+  verify_exist_hash "$tarball" "$expected_hash"
+  echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
+}
+
+function download_apache() {
+  local url_prefix=$1
+  local tarball=$2
+  local expected_hash=$3
 
   if [ -n "$apache_mirror" ]; then
-    wget -c -P "$DOWNLOADS" "$url_prefix/$tarball"
+    wget -c -P "$DOWNLOADS" "$apache_mirror/$url_prefix/$tarball"
   fi 
+
+  if [[ ! -f "$DOWNLOADS/$tarball" ]]; then
+    echo "Downloading $tarball from Apache archive"
+    wget -c -P "$DOWNLOADS" "https://archive.apache.org/dist/$url_prefix/$tarball"
+  fi
+
   verify_exist_hash "$tarball" "$expected_hash"
   echo "$tarball exists in downloads/ and matches expected checksum ($expected_hash)"
 }
 
+function fetch_hadoop() {
+  download_apache "hadoop/common/hadoop-$HADOOP_VERSION" "$HADOOP_TARBALL" "$HADOOP_HASH"
+}
+
+function fetch_zookeeper() {
+  download_apache "zookeeper/zookeeper-$ZOOKEEPER_VERSION" "$ZOOKEEPER_TARBALL" "$ZOOKEEPER_HASH"
+}
+
 function fetch_accumulo() {
   if [[ $1 != "--no-deps" ]]; then
-    download_verify "$apache_mirror/zookeeper/zookeeper-$ZOOKEEPER_VERSION" "$ZOOKEEPER_TARBALL" "$ZOOKEEPER_HASH"
-    download_verify "$apache_mirror/hadoop/common/hadoop-$HADOOP_VERSION" "$HADOOP_TARBALL" "$HADOOP_HASH"
+    fetch_hadoop
+    fetch_zookeeper
   fi
 
   if [[ -n "$ACCUMULO_REPO" ]]; then
@@ -53,7 +77,7 @@ function fetch_accumulo() {
     popd
     cp "$accumulo_built_tarball" "$DOWNLOADS"/
   else
-    download_verify "$apache_mirror/accumulo/$ACCUMULO_VERSION" "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
+    download_apache "accumulo/$ACCUMULO_VERSION" "$ACCUMULO_TARBALL" "$ACCUMULO_HASH"
   fi
 }
 
@@ -75,7 +99,7 @@ function fetch_fluo() {
     cp "$fluo_built_tarball" "$DOWNLOADS"/
   else
     [[ $FLUO_VERSION =~ .*-incubating ]] && apache_mirror="${apache_mirror}/incubator"
-    download_verify "$apache_mirror/fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
+    download_apache "fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
   fi
 }
 
@@ -89,7 +113,7 @@ fi
 
 case "$1" in
 spark)
-  download_verify "$apache_mirror/spark/spark-$SPARK_VERSION" "$SPARK_TARBALL" "$SPARK_HASH"
+  download_apache "spark/spark-$SPARK_VERSION" "$SPARK_TARBALL" "$SPARK_HASH"
   ;;
 accumulo)
   fetch_accumulo "$2"
@@ -115,10 +139,12 @@ fluo-yarn)
     cp "$built_tarball" "$DOWNLOADS"/
   else
     [[ $FLUO_VERSION =~ .*-incubating ]] && apache_mirror="${apache_mirror}/incubator"
-    download_verify "$apache_mirror/fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
+    download_apache "fluo/fluo/$FLUO_VERSION" "$FLUO_TARBALL" "$FLUO_HASH"
   fi
   ;;
- 
+hadoop)
+  fetch_hadoop
+  ;;
 metrics)
   if [[ "$OSTYPE" == "darwin"* ]]; then
     echo "The metrics services (InfluxDB and Grafana) are not supported on Mac OS X at this time."
@@ -134,7 +160,7 @@ metrics)
   GF_PATH=$BUILD/$GF_DIR
 
   INFLUXDB_TARBALL=influxdb_"$INFLUXDB_VERSION"_x86_64.tar.gz
-  download_verify https://s3.amazonaws.com/influxdb "$INFLUXDB_TARBALL" "$INFLUXDB_HASH"
+  download_other https://s3.amazonaws.com/influxdb "$INFLUXDB_TARBALL" "$INFLUXDB_HASH"
 
   tar xzf "$DOWNLOADS/$INFLUXDB_TARBALL" -C "$BUILD"
   mv "$BUILD/influxdb_${INFLUXDB_VERSION}_x86_64" "$IF_PATH"
@@ -147,7 +173,7 @@ metrics)
   rm -rf "$IF_PATH"
 
   GRAFANA_TARBALL=grafana-"$GRAFANA_VERSION".linux-x64.tar.gz
-  download_verify https://grafanarel.s3.amazonaws.com/builds "$GRAFANA_TARBALL" "$GRAFANA_HASH"
+  download_other https://grafanarel.s3.amazonaws.com/builds "$GRAFANA_TARBALL" "$GRAFANA_HASH"
 
   tar xzf "$DOWNLOADS/$GRAFANA_TARBALL" -C "$BUILD"
 
@@ -155,14 +181,19 @@ metrics)
   tar czf grafana-"$GRAFANA_VERSION".tar.gz "$GF_DIR"
   rm -rf "$GF_PATH"
   ;;
+zookeeper)
+  fetch_zookeeper
+  ;;
 *)
   echo "Usage: uno fetch <component>"
   echo -e "\nPossible components:\n"
   echo "    all        Fetches all binary tarballs of the following components"
   echo "    accumulo   Downloads Accumulo, Hadoop & ZooKeeper. Builds Accumulo if repo set in uno.conf"
   echo "    fluo       Downloads Fluo, Accumulo, Hadoop & ZooKeeper. Builds Fluo or Accumulo if repo set in uno.conf"
+  echo "    hadoop     Downloads Hadoop"
   echo "    metrics    Downloads InfluxDB and Grafana"
   echo "    spark      Downloads Spark"
+  echo "    zookeeper  Downloads ZooKeeper"
   echo "Options:"
   echo "    --no-deps  Dependencies will be fetched unless this option is specified. Only works for fluo & accumulo components."
   exit 1
diff --git a/bin/impl/load-env.sh b/bin/impl/load-env.sh
index b828f9e..df507eb 100755
--- a/bin/impl/load-env.sh
+++ b/bin/impl/load-env.sh
@@ -34,7 +34,7 @@ then
   exit 1
 fi
 
-HP=$HADOOP_PREFIX
+HH=$HADOOP_HOME
 HC=$HADOOP_CONF_DIR
 ZH=$ZOOKEEPER_HOME
 SH=$SPARK_HOME
@@ -53,8 +53,8 @@ fi
 
 # Confirm that hadoop, accumulo, and zookeeper env variables are not set
 if [[ ! "version env" =~ $1 ]]; then
-  if [[ -n "$HP" && "$HP" != "$HADOOP_PREFIX" ]]; then
-    echo "HADOOP_PREFIX in your shell env '$HP' needs to match your uno uno.conf '$HADOOP_PREFIX'"
+  if [[ -n "$HH" && "$HH" != "$HADOOP_HOME" ]]; then
+    echo "HADOOP_HOME in your shell env '$HH' needs to match your uno uno.conf '$HADOOP_HOME'"
     exit 1
   fi
   if [[ -n "$HC" && "$HC" != "$HADOOP_CONF_DIR" ]]; then
@@ -115,7 +115,7 @@ fi
 : "${ZOOKEEPER_TARBALL:?"ZOOKEEPER_TARBALL is not set in uno.conf"}"
 : "${FLUO_HOME:?"FLUO_HOME is not set in uno.conf"}"
 : "${ZOOKEEPER_HOME:?"ZOOKEEPER_HOME is not set in uno.conf"}"
-: "${HADOOP_PREFIX:?"HADOOP_PREFIX is not set in uno.conf"}"
+: "${HADOOP_HOME:?"HADOOP_HOME is not set in uno.conf"}"
 : "${ACCUMULO_HOME:?"ACCUMULO_HOME is not set in uno.conf"}"
 : "${ACCUMULO_INSTANCE:?"ACCUMULO_INSTANCE is not set in uno.conf"}"
 : "${ACCUMULO_USER:?"ACCUMULO_USER is not set in uno.conf"}"
@@ -123,7 +123,6 @@ fi
 : "${LOGS_DIR:?"LOGS_DIR is not set in uno.conf"}"
 : "${ACCUMULO_LOG_DIR:?"ACCUMULO_LOG_DIR is not set in uno.conf"}"
 : "${HADOOP_LOG_DIR:?"HADOOP_LOG_DIR is not set in uno.conf"}"
-: "${YARN_LOG_DIR:?"YARN_LOG_DIR is not set in uno.conf"}"
 : "${ZOO_LOG_DIR:?"ZOO_LOG_DIR is not set in uno.conf"}"
 
 hash shasum 2>/dev/null || { echo >&2 "shasum must be installed & on PATH. Aborting."; exit 1; }
diff --git a/bin/impl/print-env.sh b/bin/impl/print-env.sh
index 4c34576..6cb1908 100755
--- a/bin/impl/print-env.sh
+++ b/bin/impl/print-env.sh
@@ -15,7 +15,11 @@
 # limitations under the License.
 
 if [[ -z "$1" || "$1" == "--vars" ]]; then
-  echo "export HADOOP_PREFIX=\"$HADOOP_PREFIX\""
+  if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+    echo "export HADOOP_PREFIX=\"$HADOOP_HOME\""
+  else
+    echo "export HADOOP_HOME=\"$HADOOP_HOME\""
+  fi
   echo "export HADOOP_CONF_DIR=\"$HADOOP_CONF_DIR\""
   echo "export ZOOKEEPER_HOME=\"$ZOOKEEPER_HOME\""
   echo "export SPARK_HOME=\"$SPARK_HOME\""
@@ -25,7 +29,7 @@ if [[ -z "$1" || "$1" == "--vars" ]]; then
 fi
 
 if [[ -z "$1" || "$1" == "--paths" ]]; then
-  echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_PREFIX/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
+  echo -n "export PATH=\"\$PATH:$UNO_HOME/bin:$HADOOP_HOME/bin:$ZOOKEEPER_HOME/bin:$ACCUMULO_HOME/bin"
   if [[ -d "$SPARK_HOME" ]]; then
     echo -n ":$SPARK_HOME/bin"
   fi
diff --git a/bin/impl/setup-accumulo.sh b/bin/impl/setup-accumulo.sh
index b05b2a6..a924eb2 100755
--- a/bin/impl/setup-accumulo.sh
+++ b/bin/impl/setup-accumulo.sh
@@ -57,7 +57,7 @@ else
 fi
 $SED "s#localhost#$UNO_HOST#" "$conf/masters" "$conf/monitor" "$conf/gc"
 $SED "s#export ZOOKEEPER_HOME=[^ ]*#export ZOOKEEPER_HOME=$ZOOKEEPER_HOME#" "$conf"/accumulo-env.sh
-$SED "s#export HADOOP_PREFIX=[^ ]*#export HADOOP_PREFIX=$HADOOP_PREFIX#" "$conf"/accumulo-env.sh
+$SED "s#export HADOOP_PREFIX=[^ ]*#export HADOOP_PREFIX=$HADOOP_HOME#" "$conf"/accumulo-env.sh
 $SED "s#export ACCUMULO_LOG_DIR=[^ ]*#export ACCUMULO_LOG_DIR=$ACCUMULO_LOG_DIR#" "$conf"/accumulo-env.sh
 if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
   $SED "s#ACCUMULO_TSERVER_OPTS=.*#ACCUMULO_TSERVER_OPTS=\"-Xmx$ACCUMULO_TSERV_MEM -Xms$ACCUMULO_TSERV_MEM\"#" "$conf"/accumulo-env.sh
@@ -91,7 +91,7 @@ if [[ "$ACCUMULO_USE_NATIVE_MAP" == "true" ]]; then
   fi
 fi
 
-"$HADOOP_PREFIX"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
+"$HADOOP_HOME"/bin/hadoop fs -rm -r /accumulo 2> /dev/null || true
 "$ACCUMULO_HOME"/bin/accumulo init --clear-instance-name --instance-name "$ACCUMULO_INSTANCE" --password "$ACCUMULO_PASSWORD"
 
 if [[ $ACCUMULO_VERSION =~ ^1\..*$ ]]; then
diff --git a/bin/impl/setup-fluo-yarn.sh b/bin/impl/setup-fluo-yarn.sh
index 8bf0d31..bd98b0a 100755
--- a/bin/impl/setup-fluo-yarn.sh
+++ b/bin/impl/setup-fluo-yarn.sh
@@ -46,7 +46,7 @@ if [[ -f "$DOWNLOADS/$FLUO_YARN_TARBALL" ]]; then
   $SED "s/.*fluo.yarn.worker.max.memory.mb=.*/fluo.yarn.worker.max.memory.mb=$FLUO_WORKER_MEM_MB/g" "$yarn_props"
   $SED "s/.*fluo.yarn.worker.instances=.*/fluo.yarn.worker.instances=$FLUO_WORKER_INSTANCES/g" "$yarn_props"
   $SED "s#FLUO_HOME=.*#FLUO_HOME=$FLUO_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
-  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_PREFIX#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
+  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
   $SED "s#ZOOKEEPER_HOME=.*#ZOOKEEPER_HOME=$ZOOKEEPER_HOME#g" "$FLUO_YARN_HOME"/conf/fluo-yarn-env.sh
 
   "$FLUO_YARN_HOME"/lib/fetch.sh
diff --git a/bin/impl/setup-fluo.sh b/bin/impl/setup-fluo.sh
index bd487e8..19abe54 100755
--- a/bin/impl/setup-fluo.sh
+++ b/bin/impl/setup-fluo.sh
@@ -64,7 +64,7 @@ if [[ -f "$DOWNLOADS/$FLUO_TARBALL" ]]; then
   $SED "s/.*fluo.yarn.worker.max.memory.mb=.*/fluo.yarn.worker.max.memory.mb=$FLUO_WORKER_MEM_MB/g" "$fluo_props"
   $SED "s/.*fluo.yarn.worker.instances=.*/fluo.yarn.worker.instances=$FLUO_WORKER_INSTANCES/g" "$fluo_props"
 
-  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_PREFIX#g" "$FLUO_HOME"/conf/fluo-env.sh
+  $SED "s#HADOOP_PREFIX=.*#HADOOP_PREFIX=$HADOOP_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
   $SED "s#ACCUMULO_HOME=.*o#ACCUMULO_HOME=$ACCUMULO_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
   $SED "s#ZOOKEEPER_HOME=.*#ZOOKEEPER_HOME=$ZOOKEEPER_HOME#g" "$FLUO_HOME"/conf/fluo-env.sh
 
diff --git a/bin/impl/setup-hadoop.sh b/bin/impl/setup-hadoop.sh
index 483223e..2f8f6f8 100755
--- a/bin/impl/setup-hadoop.sh
+++ b/bin/impl/setup-hadoop.sh
@@ -24,30 +24,32 @@ pkill -f hadoop.yarn
 # stop if any command fails
 set -e
 
-print_to_console "Setting up Apache Hadoop at $HADOOP_PREFIX"
+print_to_console "Setting up Apache Hadoop at $HADOOP_HOME"
+print_to_console "Apache Hadoop logs are at $HADOOP_LOG_DIR"
 
 rm -rf "$INSTALL"/hadoop-*
 rm -f "$HADOOP_LOG_DIR"/*
-rm -rf "$YARN_LOG_DIR"/application_*
-rm -f "$YARN_LOG_DIR"/*
+rm -rf "$HADOOP_LOG_DIR"/application_*
 rm -rf "$DATA_DIR"/hadoop
 mkdir -p "$HADOOP_LOG_DIR"
-mkdir -p "$YARN_LOG_DIR"
 
 tar xzf "$DOWNLOADS/$HADOOP_TARBALL" -C "$INSTALL"
 
-hadoop_conf="$HADOOP_PREFIX"/etc/hadoop
+hadoop_conf="$HADOOP_HOME"/etc/hadoop
 cp "$UNO_HOME"/conf/hadoop/* "$hadoop_conf/"
 $SED "s#UNO_HOST#$UNO_HOST#g" "$hadoop_conf/core-site.xml" "$hadoop_conf/hdfs-site.xml" "$hadoop_conf/yarn-site.xml"
 $SED "s#DATA_DIR#$DATA_DIR#g" "$hadoop_conf/hdfs-site.xml" "$hadoop_conf/yarn-site.xml" "$hadoop_conf/mapred-site.xml"
-$SED "s#YARN_LOGS#$YARN_LOG_DIR#g" "$hadoop_conf/yarn-site.xml"
+$SED "s#HADOOP_LOG_DIR#$HADOOP_LOG_DIR#g" "$hadoop_conf/yarn-site.xml"
 $SED "s#YARN_NM_MEM_MB#$YARN_NM_MEM_MB#g" "$hadoop_conf/yarn-site.xml"
 $SED "s#YARN_NM_CPU_VCORES#$YARN_NM_CPU_VCORES#g" "$hadoop_conf/yarn-site.xml"
-$SED "s#\#export HADOOP_LOG_DIR=[^ ]*#export HADOOP_LOG_DIR=$HADOOP_LOG_DIR#g" "$hadoop_conf/hadoop-env.sh"
-$SED "s#\${JAVA_HOME}#${JAVA_HOME}#g" "$hadoop_conf/hadoop-env.sh"
-$SED "s#YARN_LOG_DIR=[^ ]*#YARN_LOG_DIR=$YARN_LOG_DIR#g" "$hadoop_conf/yarn-env.sh"
 
-"$HADOOP_PREFIX"/bin/hdfs namenode -format
-"$HADOOP_PREFIX"/sbin/start-dfs.sh
-"$HADOOP_PREFIX"/sbin/start-yarn.sh
+echo "export JAVA_HOME=$JAVA_HOME" >> "$hadoop_conf/hadoop-env.sh"
+echo "export HADOOP_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/hadoop-env.sh"
+if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+  echo "export YARN_LOG_DIR=$HADOOP_LOG_DIR" >> "$hadoop_conf/yarn-env.sh"
+fi
+
+"$HADOOP_HOME"/bin/hdfs namenode -format
+"$HADOOP_HOME"/sbin/start-dfs.sh
+"$HADOOP_HOME"/sbin/start-yarn.sh
 
diff --git a/bin/impl/setup-spark.sh b/bin/impl/setup-spark.sh
index 46f8b5c..efea054 100755
--- a/bin/impl/setup-spark.sh
+++ b/bin/impl/setup-spark.sh
@@ -18,7 +18,7 @@ source "$UNO_HOME"/bin/impl/util.sh
 
 verify_exist_hash "$SPARK_TARBALL" "$SPARK_HASH"
 
-if [[ ! -d "$HADOOP_PREFIX" ]]; then
+if [[ ! -d "$HADOOP_HOME" ]]; then
   print_to_console "Apache Hadoop needs to be setup before Apache Spark can be setup."
   exit 1
 fi
diff --git a/bin/impl/start.sh b/bin/impl/start.sh
index 8fe8b00..6117e13 100755
--- a/bin/impl/start.sh
+++ b/bin/impl/start.sh
@@ -21,7 +21,7 @@ case "$1" in
     check_dirs ACCUMULO_HOME
 
     if [[ "$2" != "--no-deps" ]]; then
-      check_dirs ZOOKEEPER_HOME HADOOP_PREFIX
+      check_dirs ZOOKEEPER_HOME HADOOP_HOME
 
       tmp="$(pgrep -f QuorumPeerMain | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
@@ -31,13 +31,13 @@ case "$1" in
 
       tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
-        "$HADOOP_PREFIX"/sbin/start-dfs.sh
+        "$HADOOP_HOME"/sbin/start-dfs.sh
       else echo "Hadoop DFS  already running at: $tmp"  
       fi
       
       tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
       if [[ -z "$tmp" ]]; then
-        "$HADOOP_PREFIX"/sbin/start-yarn.sh
+        "$HADOOP_HOME"/sbin/start-yarn.sh
       else echo "Hadoop Yarn already running at: $tmp"  
       fi
     fi
@@ -53,17 +53,17 @@ case "$1" in
     fi
     ;;
   hadoop)
-    check_dirs HADOOP_PREFIX
+    check_dirs HADOOP_HOME
     
     tmp="$(pgrep -f hadoop\\.hdfs | tr '\n' ' ')"
     if [[ -z "$tmp" ]]; then
-      "$HADOOP_PREFIX"/sbin/start-dfs.sh
+      "$HADOOP_HOME"/sbin/start-dfs.sh
     else echo "Hadoop DFS  already running at: $tmp"  
     fi
 
     tmp="$(pgrep -f hadoop\\.yarn | tr '\n' ' ')"
     if [[ -z "$tmp" ]]; then
-      "$HADOOP_PREFIX"/sbin/start-yarn.sh
+      "$HADOOP_HOME"/sbin/start-yarn.sh
     else echo "Hadoop Yarn already running at: $tmp"  
     fi
     ;;
diff --git a/bin/impl/stop.sh b/bin/impl/stop.sh
index 617e70c..ac4af5c 100755
--- a/bin/impl/stop.sh
+++ b/bin/impl/stop.sh
@@ -29,14 +29,14 @@ case "$1" in
     fi
 
     if [[ "$2" != "--no-deps" ]]; then
-      check_dirs ZOOKEEPER_HOME HADOOP_PREFIX
+      check_dirs ZOOKEEPER_HOME HADOOP_HOME
 
       if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
-        "$HADOOP_PREFIX"/sbin/stop-yarn.sh
+        "$HADOOP_HOME"/sbin/stop-yarn.sh
       fi
 
       if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
-        "$HADOOP_PREFIX"/sbin/stop-dfs.sh
+        "$HADOOP_HOME"/sbin/stop-dfs.sh
       fi
 
       if [[ ! -z "$(pgrep -f QuorumPeerMain)" ]]; then
@@ -45,14 +45,14 @@ case "$1" in
     fi
     ;;
   hadoop)
-    check_dirs HADOOP_PREFIX
+    check_dirs HADOOP_HOME
     
     if [[ ! -z "$(pgrep -f hadoop\\.yarn)" ]]; then
-      "$HADOOP_PREFIX"/sbin/stop-yarn.sh
+      "$HADOOP_HOME"/sbin/stop-yarn.sh
     fi
 
     if [[ ! -z "$(pgrep -f hadoop\\.hdfs)" ]]; then
-      "$HADOOP_PREFIX"/sbin/stop-dfs.sh
+      "$HADOOP_HOME"/sbin/stop-dfs.sh
     fi
     ;;
   zookeeper)
diff --git a/conf/hadoop/yarn-site.xml b/conf/hadoop/yarn-site.xml
index c3ccb43..465122d 100644
--- a/conf/hadoop/yarn-site.xml
+++ b/conf/hadoop/yarn-site.xml
@@ -31,7 +31,7 @@
   </property>
   <property>
     <name>yarn.nodemanager.log-dirs</name>
-    <value>YARN_LOGS</value>
+    <value>HADOOP_LOG_DIR</value>
   </property>
   <property>
     <name>yarn.nodemanager.aux-services</name>
diff --git a/conf/spark/spark-env.sh b/conf/spark/spark-env.sh
index ccbd7ff..016dd17 100755
--- a/conf/spark/spark-env.sh
+++ b/conf/spark/spark-env.sh
@@ -20,5 +20,5 @@
 # This file is sourced when running various Spark programs.
 # Copy it as spark-env.sh and edit that to configure Spark for your site.
 
-SPARK_DIST_CLASSPATH=$("$HADOOP_PREFIX"/bin/hadoop classpath)
+SPARK_DIST_CLASSPATH=$("$HADOOP_HOME"/bin/hadoop classpath)
 export SPARK_DIST_CLASSPATH
diff --git a/conf/uno.conf b/conf/uno.conf
index 5fc87f2..8f4c4a4 100644
--- a/conf/uno.conf
+++ b/conf/uno.conf
@@ -45,7 +45,7 @@ export FLUO_YARN_TARBALL=fluo-yarn-$FLUO_YARN_VERSION-bin.tar.gz
 # If set, 'uno fetch' will build (instead of downloading) an Accumulo tarball
 # from that directory and copy it to the downloads directory.
 
-#export ACCUMULO_REPO=
+#export ACCUMULO_REPO=/path/to/accumulo
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -62,7 +62,7 @@ fi
 # If set, 'uno fetch' will build (rather than download) a Fluo tarball
 # from that directory and copy it to the downloads directory.
 
-#export FLUO_REPO=
+#export FLUO_REPO=/path/to/fluo
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -79,7 +79,7 @@ fi
 # If set, 'uno fetch' will build (rather than download) a Fluo YARN tarball
 # from that directory and copy it to the downloads directory.
 
-#export FLUO_YARN_REPO=
+#export FLUO_YARN_REPO=/path/to/fluo-yarn
 
 # Comment out the following if block if you don't want to automatically detect
 # version from the pom.xml. This could be useful if you want to switch branches
@@ -99,18 +99,20 @@ export INSTALL=$UNO_HOME/install
 export DATA_DIR=$INSTALL/data
 # Home directories
 export ZOOKEEPER_HOME=$INSTALL/zookeeper-$ZOOKEEPER_VERSION
-export HADOOP_PREFIX=$INSTALL/hadoop-$HADOOP_VERSION
+export HADOOP_HOME=$INSTALL/hadoop-$HADOOP_VERSION
+if [[ $HADOOP_VERSION =~ ^2\..*$ ]]; then
+  export HADOOP_PREFIX=$HADOOP_HOME
+fi
 export ACCUMULO_HOME=$INSTALL/accumulo-$ACCUMULO_VERSION
 export SPARK_HOME=$INSTALL/spark-$SPARK_VERSION-bin-without-hadoop
 export FLUO_HOME=$INSTALL/fluo-$FLUO_VERSION
 export FLUO_YARN_HOME=$INSTALL/fluo-yarn-$FLUO_YARN_VERSION
 # Config directories
-export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
+export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
 # Log directories
 export LOGS_DIR=$INSTALL/logs
 export ACCUMULO_LOG_DIR=$LOGS_DIR/accumulo
 export HADOOP_LOG_DIR=$LOGS_DIR/hadoop
-export YARN_LOG_DIR=$LOGS_DIR/yarn
 export ZOO_LOG_DIR=$LOGS_DIR/zookeeper
 
 # Accumulo configuration


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services