Posted to commits@bigtop.apache.org by rv...@apache.org on 2012/04/16 18:10:32 UTC

svn commit: r1326670 [3/47] - in /incubator/bigtop/trunk: ./ bigtop-deploy/puppet/manifests/ bigtop-deploy/puppet/modules/hadoop-hbase/manifests/ bigtop-deploy/puppet/modules/hadoop-hbase/templates/ bigtop-deploy/puppet/modules/hadoop-oozie/manifests/ ...

Added: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc?rev=1326670&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc (added)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-datanode.svc Mon Apr 16 16:10:22 2012
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_NAME="datanode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
+
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_start() {
+
+cat <<'__EOT__'
+start() {
+  [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+  [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+  log_success_msg "Starting ${DESC}: "
+
+  TARGET_USER_NAME="HADOOP_`echo datanode | tr a-z A-Z`_USER"
+  TARGET_USER=$(eval "echo \$$TARGET_USER_NAME")
+
+  # The following needs to be removed once HDFS-1943 is finally put to rest.
+  # The logic of this ugly hack is this: IFF we do NOT have jsvc installed, it is
+  # guaranteed that we can NOT be running in secure mode and thus we need to
+  # work around HDFS-1943 (start as non-root). As soon as jsvc gets installed
+  # we assume a secure installation and start the datanode as root.
+  # This leaves 2 corner cases:
+  #    1. HADOOP_DATANODE_USER being set to root
+  #    2. jsvc is installed but Hadoop is configured to run in non-secure mode
+  # Both cases will currently fail
+  if [ -n "$HADOOP_SECURE_DN_USER" ]; then
+    TARGET_USER=root
+  fi
+
+  export HADOOP_IDENT_STRING=$TARGET_USER
+  su -s /bin/bash $TARGET_USER -c "$EXEC_PATH --config '$CONF_DIR' start $DAEMON_FLAGS"
+
+  # Some processes are slow to start
+  sleep $SLEEP_TIME
+  checkstatusofproc
+  RETVAL=$?
+
+  [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+  return $RETVAL
+}
+__EOT__
+
+}
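
With this definition in place, flipping the datanode into secure (jsvc) mode is driven entirely from the defaults file that the generated init script sources: start() switches TARGET_USER to root whenever HADOOP_SECURE_DN_USER is set. An illustrative defaults file -- the values are assumptions mirroring the commented-out block in hdfs.default further down, not part of this commit:

    # /etc/default/hadoop-hdfs-datanode (illustrative)
    export HADOOP_SECURE_DN_USER=hdfs            # generated start() then launches the DN as root
    export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop-hdfs
    export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop-hdfs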

Added: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc?rev=1326670&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc (added)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-namenode.svc Mon Apr 16 16:10:22 2012
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_NAME="namenode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
+
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_extra_commands() {
+
+cat <<'__EOT__'
+    upgrade|rollback)
+      DAEMON_FLAGS="$DAEMON_FLAGS -${1}"
+      start
+      ;;
+    init)
+      check_for_root
+      checkstatusofproc
+      if [ "$?" = "$STATUS_RUNNING" ] ; then
+        echo "Error: $DESC is running. Stop it first."
+        exit 1
+      else
+        su -s /bin/bash $SVC_USER -c "hdfs namenode -format"
+      fi
+      ;;
+    *)
+      echo $"Usage: $0 {start|stop|status|restart|try-restart|condrestart|upgrade|rollback|init}"
+      exit 1
+__EOT__
+
+}
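
The generate_extra_commands override above extends the stock command set, so the generated script accepts upgrade, rollback and init on top of the defaults. Illustrative invocations, assuming the script is installed as /etc/init.d/hadoop-hdfs-namenode:

    $ sudo service hadoop-hdfs-namenode init      # runs 'hdfs namenode -format' as hdfs; refuses if the NN is running
    $ sudo service hadoop-hdfs-namenode upgrade   # start() with '-upgrade' appended to DAEMON_FLAGS
    $ sudo service hadoop-hdfs-namenode rollback  # same, with '-rollback'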

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-hdfs-secondarynamenode.svc Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_NAME="secondarynamenode"
+DAEMON="hadoop-hdfs-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop/sbin/hadoop-daemon.sh"
+SVC_USER="hdfs"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-hdfs/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
 
-set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+CHKCONFIG="2345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"

Added: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc?rev=1326670&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc (added)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-httpfs.svc Mon Apr 16 16:10:22 2012
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+HADOOP_NAME="httpfs"
+DAEMON="hadoop-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-httpfs/sbin/httpfs.sh"
+SVC_USER="$HADOOP_NAME"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/$DAEMON/conf"
+PIDFILE="/var/run/$DAEMON/hadoop-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/$DAEMON"
+
+CHKCONFIG="345 90 10"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"
+
+generate_start() {
+
+cat <<'__EOT__'
+start() {
+  [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+  [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+  log_success_msg "Starting ${DESC}: "
+
+  export HTTPFS_USER="$SVC_USER"
+  export HTTPFS_CONFIG="$CONF_DIR"
+  export HTTPFS_LOG=${HTTPFS_LOG:-"/var/log/hadoop-httpfs/"}
+  export HTTPFS_TEMP="$(dirname $PIDFILE)"
+  export HTTPFS_SLEEP_TIME="$SLEEP_TIME"
+  export CATALINA_BASE=${CATALINA_BASE:-"/usr/lib/hadoop-httpfs"}
+  export CATALINA_PID="$PIDFILE"
+  export CATALINA_TMPDIR="$HTTPFS_TEMP"
+
+  su -s /bin/bash -c "${EXEC_PATH} start $DAEMON_FLAGS" $HTTPFS_USER
+
+  checkstatusofproc
+  RETVAL=$?
+
+  [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+  return $RETVAL
+}
+__EOT__
+
+}
+
+generate_stop() {
+
+cat <<'__EOT__'
+stop() {
+  log_success_msg "Stopping ${DESC}: "
+  su -s /bin/bash $SVC_USER -c "${EXEC_PATH} stop $SLEEP_TIME -force"
+  RETVAL=$?
+
+  [ $RETVAL -eq $RETVAL_SUCCESS ] && rm -f $LOCKFILE $PIDFILE
+}
+__EOT__
+
+}
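
Unlike the hadoop-daemon.sh based services above, HttpFS runs inside Tomcat, which is why start() exports the full HTTPFS_*/CATALINA_* environment before delegating to httpfs.sh, and why stop() hands Tomcat a timeout ($SLEEP_TIME) plus -force instead of a pidfile kill. From the operator's side it is still the plain init script interface (illustrative):

    $ sudo service hadoop-httpfs start && sudo service hadoop-httpfs status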

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-layout.sh (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-layout.sh?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-layout.sh&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-layout.sh Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,8 +12,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_COMMON_DIR="./"
+HADOOP_COMMON_LIB_JARS_DIR="lib"
+HADOOP_COMMON_LIB_NATIVE_DIR="lib/native"
+HDFS_DIR="./"
+HDFS_LIB_JARS_DIR="lib"
+YARN_DIR="./"
+YARN_LIB_JARS_DIR="lib"
+MAPRED_DIR="./"
+MAPRED_LIB_JARS_DIR="lib"
 
-set -ex
-
-mvn clean -DskipTests -Dhbase.version=${HBASE_VERSION} -Dhadoop.version=0.20.205.0 install assembly:assembly "$@"
-tar --strip-components=1 -xzf target/hbase*.tar.gz
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-"/usr/lib/hadoop/libexec"}
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop/conf"}
+HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME:-"/usr/lib/hadoop"}
+HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME:-"/usr/lib/hadoop-hdfs"}
+HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME:-"/usr/lib/hadoop-mapreduce"}
+YARN_HOME=${YARN_HOME:-"/usr/lib/hadoop-yarn"}
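
Every assignment in the second half of hadoop-layout.sh uses the ${VAR:-default} idiom, so a relocated install only needs to export overrides before the layout file is sourced -- the commit copies it into the libexec dir, where the stock hadoop-config.sh is assumed to pick it up via HADOOP_LIBEXEC_DIR. A hypothetical relocation sketch (the /opt paths are made up):

    $ export HADOOP_CONF_DIR=/opt/hadoop/conf
    $ export HADOOP_HDFS_HOME=/opt/hadoop-hdfs
    $ hdfs datanode        # the bin wrapper sets HADOOP_LIBEXEC_DIR; layout defaults fill in the rest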

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-mapreduce-historyserver.svc Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_NAME="historyserver"
+DAEMON="hadoop-mapreduce-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-mapreduce/sbin/mr-jobhistory-daemon.sh"
+SVC_USER="mapred"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-mapreduce/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
 
-set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-nodemanager.svc Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_NAME="nodemanager"
+DAEMON="hadoop-yarn-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh"
+SVC_USER="yarn"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-yarn/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
 
-set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-proxyserver.svc Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_NAME="proxyserver"
+DAEMON="hadoop-yarn-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh"
+SVC_USER="yarn"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-yarn/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
 
-set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop-yarn-resourcemanager.svc Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+HADOOP_NAME="resourcemanager"
+DAEMON="hadoop-yarn-$HADOOP_NAME"
+DESC="Hadoop $HADOOP_NAME"
+EXEC_PATH="/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh"
+SVC_USER="yarn"
+DAEMON_FLAGS="$HADOOP_NAME"
+CONF_DIR="/etc/hadoop/conf"
+PIDFILE="/var/run/hadoop-yarn/yarn-$SVC_USER-$HADOOP_NAME.pid"
+LOCKFILE="/var/lock/subsys/hadoop-$HADOOP_NAME"
 
-set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+CHKCONFIG="345 85 15"
+INIT_DEFAULT_START="3 4 5"
+INIT_DEFAULT_STOP="0 1 2 6"

Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default Mon Apr 16 16:10:22 2012
@@ -14,11 +14,12 @@
 # limitations under the License.
 export HADOOP_HOME_WARN_SUPPRESS=true
 export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hadoop
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
+export HADOOP_PREFIX=/usr/lib/hadoop
+
+export HADOOP_LIBEXEC_DIR=/usr/lib/hadoop/libexec
+export HADOOP_CONF_DIR=/etc/hadoop/conf
+
+export HADOOP_COMMON_HOME=/usr/lib/hadoop
+export HADOOP_HDFS_HOME=/usr/lib/hadoop-hdfs
+export HADOOP_MAPRED_HOME=/usr/lib/hadoop-mapreduce
+export YARN_HOME=/usr/lib/hadoop-yarn

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hdfs.default (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hdfs.default?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hdfs.default&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hdfs.default Mon Apr 16 16:10:22 2012
@@ -12,13 +12,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
+export HADOOP_PID_DIR=/var/run/hadoop-hdfs
+export HADOOP_LOG_DIR=/var/log/hadoop-hdfs
 export HADOOP_NAMENODE_USER=hdfs
 export HADOOP_SECONDARYNAMENODE_USER=hdfs
 export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hadoop
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
+export HADOOP_IDENT_STRING=hdfs
+
+# export HADOOP_SECURE_DN_USER=hdfs
+# export HADOOP_SECURE_DN_PID_DIR=/var/run/hadoop-hdfs
+# export HADOOP_SECURE_DN_LOG_DIR=/var/log/hadoop-hdfs

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/httpfs.default (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/httpfs.default?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/httpfs.default&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/hadoop.default (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/httpfs.default Mon Apr 16 16:10:22 2012
@@ -12,13 +12,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-export HADOOP_HOME_WARN_SUPPRESS=true
-export HADOOP_HOME=/usr/lib/hadoop
-export HADOOP_NAMENODE_USER=hdfs
-export HADOOP_SECONDARYNAMENODE_USER=hdfs
-export HADOOP_DATANODE_USER=hdfs
-export HADOOP_JOBTRACKER_USER=mapred
-export HADOOP_TASKTRACKER_USER=mapred
-export HADOOP_IDENT_STRING=hadoop
-export HADOOP_PID_DIR=/var/run/hadoop
-export HADOOP_LOG_DIR=/var/log/hadoop
+export HTTPFS_USER=httpfs
+export HTTPFS_CONFIG=/etc/hadoop-httpfs/conf
+export HTTPFS_LOG=/var/log/hadoop-httpfs/
+export HTTPFS_TEMP=/var/run/hadoop-httpfs/
+export HTTPFS_CATALINA_HOME=/usr/lib/bigtop-tomcat
+export CATALINA_PID=/var/run/hadoop-httpfs/hadoop-httpfs-httpfs.pid
+export CATALINA_BASE=/usr/lib/hadoop-httpfs
+export CATALINA_TMPDIR=/var/run/hadoop-httpfs/
+# HTTPFS_HTTP_PORT
+# HTTPFS_ADMIN_PORT
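
The two commented variables at the end are the knobs for the HttpFS Tomcat connector ports; uncommenting and exporting them moves the server off its upstream defaults (14000 for HTTP, 14001 for admin -- values from upstream HttpFS, not from this commit):

    export HTTPFS_HTTP_PORT=14000
    export HTTPFS_ADMIN_PORT=14001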

Added: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/init.d.tmpl
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/init.d.tmpl?rev=1326670&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/init.d.tmpl (added)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/init.d.tmpl Mon Apr 16 16:10:22 2012
@@ -0,0 +1,273 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is a poor man's templating engine for generating init.d scripts to
+# support all the Apache services that the Bigtop distro has. An actual init.d
+# script gets generated by running this script under bash and giving it
+# a mandatory argument: a file containing the configuration for the service.
+# The argument file should be a valid piece of bash code, since it gets directly
+# sourced into this template. E.g.
+#    $ bash ./init.d.tmpl hadoop-hdfs-namenode.svc > hadoop-hdfs-namenode
+# 
+# You must declare the following in your .svc configuration file:
+#     DAEMON="name of the resulting init.d script"
+#     DESC="Free form human readable description of the service"
+#     EXEC_PATH="path to the upstream daemon management script"
+#     SVC_USER="user to run this service as"
+#     DAEMON_FLAGS="flags to be passed to the $EXEC_PATH"
+#     CONF_DIR="path to the configuration directory"
+#     PIDFILE="file holding a PID of the running daemon"
+#     LOCKFILE="file signifying the service lock"
+#
+#     CHKCONFIG="chkconfig(8) registration signature"
+#     INIT_DEFAULT_START="run levels to use"
+#     INIT_DEFAULT_STOP="run levels not to use ;-)"
+# 
+# You can also override parts of the generated init.d script by providing
+# function definitions for generate_start, generate_stop and generate_extra_commands.
+# See the default implementations below and feel free to customize. Also look
+# at the existing .svc files in common to see how different services tweak
+# the defaults.
+#
+# Of course, if this whole templating thing grows too big we might need to
+# consider a real templating engine (I have played with m4, but it seems
+# quite brittle when used for manipulating pieces of shell code -- think
+# $0 and the like).
+
+if [ $# -lt 1 ] ; then
+  echo "Usage: ${BASH_SOURCE-0} service.definition.svc"
+  exit 1
+fi
+
+generate_start() {
+
+cat <<'__EOT__'
+
+start() {
+  [ -x $EXEC_PATH ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+  [ -d $CONF_DIR ] || exit $ERROR_PROGRAM_NOT_CONFIGURED
+  log_success_msg "Starting ${DESC}: "
+
+  su -s /bin/bash $SVC_USER -c "$EXEC_PATH --config '$CONF_DIR' start $DAEMON_FLAGS"
+
+  # Some processes are slow to start
+  sleep $SLEEP_TIME
+  checkstatusofproc
+  RETVAL=$?
+
+  [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+  return $RETVAL
+}
+
+__EOT__
+
+}
+
+generate_stop() {
+
+cat <<'__EOT__'
+
+stop() {
+  log_success_msg "Stopping ${DESC}: "
+  start_daemon $EXEC_PATH --config "$CONF_DIR" stop $DAEMON_FLAGS
+  RETVAL=$?
+
+  [ $RETVAL -eq $RETVAL_SUCCESS ] && rm -f $LOCKFILE $PIDFILE
+}
+
+__EOT__
+
+}
+
+generate_extra_commands() {
+
+cat <<'__EOT__'
+    *)
+      echo $"Usage: $0 {start|stop|status|restart|try-restart|condrestart}"
+      exit 1
+__EOT__
+
+}
+
+###################################################################
+# NOTE how we are sourcing the argument here so that user-defined
+# settings have a chance to override the default values for
+# generate_start, generate_stop and generate_extra_commands. If you
+# ever want to make this template even more flexible -- define the
+# default values above this line
+. $1
+
+cat <<__EOT__
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Starts a $DESC
+#
+# chkconfig: $CHKCONFIG
+# description: $DESC
+#
+### BEGIN INIT INFO
+# Provides:          $DAEMON
+# Short-Description: $DESC
+# Default-Start:     $INIT_DEFAULT_START
+# Default-Stop:      $INIT_DEFAULT_STOP
+# Required-Start:    \$syslog \$remote_fs
+# Required-Stop:     \$syslog \$remote_fs
+# Should-Start:
+# Should-Stop:
+### END INIT INFO
+
+. /lib/lsb/init-functions
+. /etc/default/hadoop
+
+if [ -f /etc/default/$DAEMON ] ; then
+  . /etc/default/$DAEMON
+fi
+
+# Autodetect JAVA_HOME if not defined
+if [ -e /usr/libexec/bigtop-detect-javahome ]; then
+  . /usr/libexec/bigtop-detect-javahome
+elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
+  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+fi
+
+RETVAL_SUCCESS=0
+
+STATUS_RUNNING=0
+STATUS_DEAD=1
+STATUS_DEAD_AND_LOCK=2
+STATUS_NOT_RUNNING=3
+STATUS_OTHER_ERROR=102
+
+
+ERROR_PROGRAM_NOT_INSTALLED=5
+ERROR_PROGRAM_NOT_CONFIGURED=6
+
+
+RETVAL=0
+SLEEP_TIME=5
+PROC_NAME="su"
+
+DAEMON="$DAEMON"
+DESC="$DESC"
+EXEC_PATH="$EXEC_PATH"
+SVC_USER="$SVC_USER"
+DAEMON_FLAGS="$DAEMON_FLAGS"
+CONF_DIR="$CONF_DIR"
+PIDFILE="$PIDFILE"
+LOCKFILE="$LOCKFILE"
+
+install -d -m 0755 -o $SVC_USER -g $SVC_USER $(dirname $PIDFILE) 1>/dev/null 2>&1 || :
+__EOT__
+
+generate_start
+generate_stop
+
+cat <<'__EOT__'
+restart() {
+  stop
+  start
+}
+
+checkstatusofproc(){
+  pidofproc -p $PIDFILE $PROC_NAME > /dev/null
+}
+
+checkstatus(){
+  checkstatusofproc
+  status=$?
+
+  case "$status" in
+    $STATUS_RUNNING)
+      log_success_msg "${DESC} is running"
+      ;;
+    $STATUS_DEAD)
+      log_failure_msg "${DESC} is dead and pid file exists"
+      ;;
+    $STATUS_DEAD_AND_LOCK)
+      log_failure_msg "${DESC} is dead and lock file exists"
+      ;;
+    $STATUS_NOT_RUNNING)
+      log_failure_msg "${DESC} is not running"
+      ;;
+    *)
+      log_failure_msg "${DESC} status is unknown"
+      ;;
+  esac
+  return $status
+}
+
+condrestart(){
+  [ -e $LOCKFILE ] && restart || :
+}
+
+check_for_root() {
+  if [ $(id -ur) -ne 0 ]; then
+    echo 'Error: root user required'
+    echo
+    exit 1
+  fi
+}
+
+service() {
+  case "$1" in
+    start)
+      check_for_root
+      start
+      ;;
+    stop)
+      check_for_root
+      stop
+      ;;
+    status)
+      checkstatus
+      RETVAL=$?
+      ;;
+    restart)
+      check_for_root
+      restart
+      ;;
+    condrestart|try-restart)
+      check_for_root
+      condrestart
+      ;;
+__EOT__
+
+generate_extra_commands
+
+cat <<'__EOT__'
+  esac
+}
+
+service "$1"
+
+exit $RETVAL
+__EOT__

Propchange: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/init.d.tmpl
------------------------------------------------------------------------------
    svn:executable = *
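
End to end, the packaging flow the template enables looks like this -- an illustrative sketch, since the real glue lives in the rpm/deb packaging rather than in this file:

    $ bash init.d.tmpl hadoop-hdfs-datanode.svc > hadoop-hdfs-datanode
    $ install -m 0755 hadoop-hdfs-datanode /etc/init.d/
    $ chkconfig --add hadoop-hdfs-datanode    # Red Hat family; update-rc.d on Debian/Ubuntu

The CHKCONFIG and INIT_DEFAULT_* values declared in each .svc file end up in the generated header comments that chkconfig(8) and the LSB tools read.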

Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/install_hadoop.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/install_hadoop.sh?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/install_hadoop.sh (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/install_hadoop.sh Mon Apr 16 16:10:22 2012
@@ -34,15 +34,22 @@ usage: $0 <options>
 OPTS=$(getopt \
   -n $0 \
   -o '' \
-  -l 'distro-dir:' \
   -l 'prefix:' \
+  -l 'distro-dir:' \
   -l 'build-dir:' \
   -l 'native-build-string:' \
   -l 'installed-lib-dir:' \
-  -l 'lib-dir:' \
+  -l 'hadoop-dir:' \
+  -l 'httpfs-dir:' \
+  -l 'hdfs-dir:' \
+  -l 'yarn-dir:' \
+  -l 'mapreduce-dir:' \
+  -l 'client-dir:' \
+  -l 'system-include-dir:' \
   -l 'system-lib-dir:' \
-  -l 'src-dir:' \
-  -l 'etc-dir:' \
+  -l 'system-libexec-dir:' \
+  -l 'hadoop-etc-dir:' \
+  -l 'httpfs-etc-dir:' \
   -l 'doc-dir:' \
   -l 'man-dir:' \
   -l 'example-dir:' \
@@ -62,12 +69,33 @@ while true ; do
         --distro-dir)
         DISTRO_DIR=$2 ; shift 2
         ;;
-        --lib-dir)
-        LIB_DIR=$2 ; shift 2
+        --httpfs-dir)
+        HTTPFS_DIR=$2 ; shift 2
+        ;;
+        --hadoop-dir)
+        HADOOP_DIR=$2 ; shift 2
+        ;;
+        --hdfs-dir)
+        HDFS_DIR=$2 ; shift 2
+        ;;
+        --yarn-dir)
+        YARN_DIR=$2 ; shift 2
+        ;;
+        --mapreduce-dir)
+        MAPREDUCE_DIR=$2 ; shift 2
+        ;;
+        --client-dir)
+        CLIENT_DIR=$2 ; shift 2
+        ;;
+        --system-include-dir)
+        SYSTEM_INCLUDE_DIR=$2 ; shift 2
         ;;
         --system-lib-dir)
         SYSTEM_LIB_DIR=$2 ; shift 2
         ;;
+        --system-libexec-dir)
+        SYSTEM_LIBEXEC_DIR=$2 ; shift 2
+        ;;
         --build-dir)
         BUILD_DIR=$2 ; shift 2
         ;;
@@ -77,8 +105,11 @@ while true ; do
         --doc-dir)
         DOC_DIR=$2 ; shift 2
         ;;
-        --etc-dir)
-        ETC_DIR=$2 ; shift 2
+        --hadoop-etc-dir)
+        HADOOP_ETC_DIR=$2 ; shift 2
+        ;;
+        --httpfs-etc-dir)
+        HTTPFS_ETC_DIR=$2 ; shift 2
         ;;
         --installed-lib-dir)
         INSTALLED_LIB_DIR=$2 ; shift 2
@@ -89,9 +120,6 @@ while true ; do
         --example-dir)
         EXAMPLE_DIR=$2 ; shift 2
         ;;
-        --src-dir)
-        SRC_DIR=$2 ; shift 2
-        ;;
         --)
         shift ; break
         ;;
@@ -110,200 +138,227 @@ for var in PREFIX BUILD_DIR; do
   fi
 done
 
-LIB_DIR=${LIB_DIR:-$PREFIX/usr/lib/hadoop}
+HADOOP_DIR=${HADOOP_DIR:-$PREFIX/usr/lib/hadoop}
+HDFS_DIR=${HDFS_DIR:-$PREFIX/usr/lib/hadoop-hdfs}
+YARN_DIR=${YARN_DIR:-$PREFIX/usr/lib/hadoop-yarn}
+MAPREDUCE_DIR=${MAPREDUCE_DIR:-$PREFIX/usr/lib/hadoop-mapreduce}
+CLIENT_DIR=${CLIENT_DIR:-$PREFIX/usr/lib/hadoop/client}
+HTTPFS_DIR=${HTTPFS_DIR:-$PREFIX/usr/lib/hadoop-httpfs}
 SYSTEM_LIB_DIR=${SYSTEM_LIB_DIR:-/usr/lib}
 BIN_DIR=${BIN_DIR:-$PREFIX/usr/bin}
 DOC_DIR=${DOC_DIR:-$PREFIX/usr/share/doc/hadoop}
 MAN_DIR=${MAN_DIR:-$PREFIX/usr/man}
+SYSTEM_INCLUDE_DIR=${SYSTEM_INCLUDE_DIR:-$PREFIX/usr/include}
+SYSTEM_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR:-$PREFIX/usr/libexec}
 EXAMPLE_DIR=${EXAMPLE_DIR:-$DOC_DIR/examples}
-SRC_DIR=${SRC_DIR:-$PREFIX/usr/src/hadoop}
-ETC_DIR=${ETC_DIR:-$PREFIX/etc/hadoop}
+HADOOP_ETC_DIR=${HADOOP_ETC_DIR:-$PREFIX/etc/hadoop}
+HTTPFS_ETC_DIR=${HTTPFS_ETC_DIR:-$PREFIX/etc/hadoop-httpfs}
 
-INSTALLED_LIB_DIR=${INSTALLED_LIB_DIR:-/usr/lib/hadoop}
-BUILD_SRC_DIR=./src
+INSTALLED_HADOOP_DIR=${INSTALLED_HADOOP_DIR:-/usr/lib/hadoop}
+HADOOP_NATIVE_LIB_DIR=${HADOOP_DIR}/lib/native
 
-mkdir -p $LIB_DIR
-(cd $BUILD_DIR && tar -cf - .) | (cd $LIB_DIR && tar xf - )
+HADOOP_VERSION=0.23.1
 
-# Take out things we've installed elsewhere
-for x in docs lib/native c++ src conf contrib/fuse-dfs/fuse_dfs usr/bin/fuse_dfs contrib/fuse share sbin/task-controller 'lib/lib*so*' 'lib/lib*a' ; do
-  rm -rf $LIB_DIR/$x 
-done
+## Needed for some distros to find ldconfig
+export PATH="/sbin/:$PATH"
 
 # Make bin wrappers
 mkdir -p $BIN_DIR
 
-for bin_wrapper in hadoop ; do
-  wrapper=$BIN_DIR/$bin_wrapper
+for component in $HADOOP_DIR/bin/hadoop $HDFS_DIR/bin/hdfs $YARN_DIR/bin/yarn $MAPREDUCE_DIR/bin/mapred ; do
+  wrapper=$BIN_DIR/${component#*/bin/}
   cat > $wrapper <<EOF
 #!/bin/sh
 
 # Autodetect JAVA_HOME if not defined
 if [ -e /usr/libexec/bigtop-detect-javahome ]; then
-  . /usr/libexec/bigtop-detect-javahome
+. /usr/libexec/bigtop-detect-javahome
 elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
-  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+. /usr/lib/bigtop-utils/bigtop-detect-javahome
 fi
 
-. /etc/default/hadoop
-exec $INSTALLED_LIB_DIR/bin/$bin_wrapper "\$@"
+export HADOOP_LIBEXEC_DIR=/${SYSTEM_LIBEXEC_DIR#${PREFIX}}
+
+exec ${component#${PREFIX}} "\$@"
 EOF
   chmod 755 $wrapper
 done
 
-# Link examples to /usr/share
-mkdir -p $EXAMPLE_DIR
-for x in $LIB_DIR/*examples*jar ; do
-  INSTALL_LOC=`echo $x | sed -e "s,$LIB_DIR,$INSTALLED_LIB_DIR,"`
-  ln -sf $INSTALL_LOC $EXAMPLE_DIR/
+# libexec
+install -d -m 0755 ${SYSTEM_LIBEXEC_DIR}
+cp ${BUILD_DIR}/libexec/* ${SYSTEM_LIBEXEC_DIR}/
+cp ${DISTRO_DIR}/hadoop-layout.sh ${SYSTEM_LIBEXEC_DIR}/
+
+# hadoop jar
+install -d -m 0755 ${HADOOP_DIR}
+cp ${BUILD_DIR}/share/hadoop/common/*.jar ${HADOOP_DIR}/
+cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-auth*.jar ${HADOOP_DIR}/
+cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/hadoop-annotations*.jar ${HADOOP_DIR}/
+install -d -m 0755 ${MAPREDUCE_DIR}
+cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-mapreduce*.jar ${MAPREDUCE_DIR}
+cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar ${MAPREDUCE_DIR}
+install -d -m 0755 ${HDFS_DIR}
+cp ${BUILD_DIR}/share/hadoop/hdfs/*.jar ${HDFS_DIR}/
+install -d -m 0755 ${YARN_DIR}
+cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-yarn*.jar ${YARN_DIR}/
+chmod 644 ${HADOOP_DIR}/*.jar ${MAPREDUCE_DIR}/*.jar ${HDFS_DIR}/*.jar ${YARN_DIR}/*.jar
+
+# lib jars
+install -d -m 0755 ${HADOOP_DIR}/lib
+cp ${BUILD_DIR}/share/hadoop/common/lib/*.jar ${HADOOP_DIR}/lib
+install -d -m 0755 ${MAPREDUCE_DIR}/lib
+cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/*.jar ${MAPREDUCE_DIR}/lib
+install -d -m 0755 ${HDFS_DIR}/lib 
+cp ${BUILD_DIR}/share/hadoop/hdfs/lib/*.jar ${HDFS_DIR}/lib
+install -d -m 0755 ${YARN_DIR}/lib
+cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/*.jar ${YARN_DIR}/lib
+chmod 644 ${HADOOP_DIR}/lib/*.jar ${MAPREDUCE_DIR}/lib/*.jar ${HDFS_DIR}/lib/*.jar ${YARN_DIR}/lib/*.jar
+
+# Install webapps
+cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps ${HDFS_DIR}/
+
+# bin
+install -d -m 0755 ${HADOOP_DIR}/bin
+cp -a ${BUILD_DIR}/bin/{hadoop,rcc,fuse_dfs,fuse_dfs_wrapper.sh} ${HADOOP_DIR}/bin
+install -d -m 0755 ${HDFS_DIR}/bin
+cp -a ${BUILD_DIR}/bin/hdfs ${HDFS_DIR}/bin
+install -d -m 0755 ${YARN_DIR}/bin
+cp -a ${BUILD_DIR}/bin/{yarn,container-executor} ${YARN_DIR}/bin
+install -d -m 0755 ${MAPREDUCE_DIR}/bin
+cp -a ${BUILD_DIR}/bin/mapred ${MAPREDUCE_DIR}/bin
+# FIXME: MAPREDUCE-3980
+cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR}/bin
+
+# sbin
+install -d -m 0755 ${HADOOP_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,slaves}.sh ${HADOOP_DIR}/sbin
+install -d -m 0755 ${HDFS_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh ${HDFS_DIR}/sbin
+install -d -m 0755 ${YARN_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{yarn-daemon,yarn-daemons}.sh ${YARN_DIR}/sbin
+install -d -m 0755 ${MAPREDUCE_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/mr-jobhistory-daemon.sh ${MAPREDUCE_DIR}/sbin
+
+# native libs
+install -d -m 0755 ${SYSTEM_LIB_DIR}
+install -d -m 0755 ${HADOOP_NATIVE_LIB_DIR}
+for library in libhdfs.so.0.0.0; do
+  cp ${BUILD_DIR}/lib/native/${library} ${SYSTEM_LIB_DIR}/
+  ldconfig -vlN ${SYSTEM_LIB_DIR}/${library}
+  ln -s ${library} ${SYSTEM_LIB_DIR}/${library/.so.*/}.so
 done
-# And copy the source
-mkdir -p $EXAMPLE_DIR/src
-cp -a $BUILD_SRC_DIR/examples/* $EXAMPLE_DIR/src
-
-# Install docs
-mkdir -p $DOC_DIR
-cp -r $BUILD_DIR/docs/* $DOC_DIR
-
-# Install source
-mkdir -p ${SRC_DIR}
-rm -f ${BUILD_SRC_DIR}/contrib/fuse-dfs/src/*.o 
-rm -f ${BUILD_SRC_DIR}/contrib/fuse-dfs/src/fuse_dfs
-rm -rf ${BUILD_SRC_DIR}/contrib/hod
-rm -f ${SRC_DIR}/contrib/fuse-dfs/fuse_dfs
-
-
-cp -a ${BUILD_SRC_DIR}/* ${SRC_DIR}/
-
-# Make the empty config
-install -d -m 0755 $ETC_DIR/conf.empty
-(cd ${BUILD_DIR}/conf && tar cf - .) | (cd $ETC_DIR/conf.empty && tar xf -)
-
-# Create symlinks to preserve old jar names
-# Also create symlinks of versioned jars to jars without version names, which other
-# packages can depend on
-(cd $LIB_DIR &&
-for j in hadoop-*.jar; do
-  if [[ $j =~ hadoop-(.*)-([^-]+).jar ]]; then
-    name=${BASH_REMATCH[1]}
-    ver=${BASH_REMATCH[2]}
-    ln -s hadoop-$name-$ver.jar hadoop-$name.jar
-  fi
-done)
 
-# Link the HADOOP_HOME conf, log and pid dir to installed locations
-rm -rf $LIB_DIR/conf
-ln -s ${ETC_DIR#$PREFIX}/conf $LIB_DIR/conf
-rm -rf $LIB_DIR/logs
-ln -s /var/log/hadoop $LIB_DIR/logs
-rm -rf $LIB_DIR/pids
-ln -s /var/run/hadoop $LIB_DIR/pids
+install -d -m 0755 ${SYSTEM_INCLUDE_DIR}
+cp ${BUILD_DIR}/include/hdfs.h ${SYSTEM_INCLUDE_DIR}/
 
-# Make the pseudo-distributed config
-for conf in conf.pseudo ; do
-  install -d -m 0755 $ETC_DIR/$conf
-  # Install the default configurations and set some permissions.
-  (cd ${BUILD_DIR}/conf && tar -cf - .) | (cd $ETC_DIR/$conf && tar -xf -)
-  chmod -R 0644 $ETC_DIR/$conf/*
-  # Overlay the -site files
-  (cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $ETC_DIR/$conf && tar -xf -)
-  # When building straight out of svn we have to account for pesky .svn subdirs
-  rm -rf `find $ETC_DIR/$conf -name .svn -type d`
+cp ${BUILD_DIR}/lib/native/*.a ${HADOOP_NATIVE_LIB_DIR}/
+for library in `cd ${BUILD_DIR}/lib ; ls libsnappy.so.1.* 2>/dev/null` libhadoop.so.1.0.0; do
+  cp ${BUILD_DIR}/lib/native/${library} ${HADOOP_NATIVE_LIB_DIR}/
+  ldconfig -vlN ${HADOOP_NATIVE_LIB_DIR}/${library}
+  ln -s ${library} ${HADOOP_NATIVE_LIB_DIR}/${library/.so.*/}.so
 done
 
-# man pages
-mkdir -p $MAN_DIR/man1
-gzip -c < $DISTRO_DIR/hadoop.1 > $MAN_DIR/man1/hadoop.1.gz
-
-############################################################
-# ARCH DEPENDENT STUFF
-############################################################
-
-if [ ! -z "$NATIVE_BUILD_STRING" ]; then
-  # Fuse 
-  mkdir -p $LIB_DIR/bin
-  if [ -d ./src/contrib/fuse-dfs ]; then
-    gzip -c < $DISTRO_DIR/hadoop-fuse-dfs.1 > $MAN_DIR/man1/hadoop-fuse-dfs.1.gz
-
-    # Fuse 
-    mkdir -p $LIB_DIR/bin
-    mv  ${BUILD_DIR}/contrib/fuse-dfs/* $LIB_DIR/bin
-    rmdir ${BUILD_DIR}/contrib/fuse-dfs
-
-    fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
-  cat > $fuse_wrapper << EOF
+# Install fuse wrapper
+fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
+cat > $fuse_wrapper << EOF
 #!/bin/bash
 
 /sbin/modprobe fuse
 
 # Autodetect JAVA_HOME if not defined
 if [ -e /usr/libexec/bigtop-detect-javahome ]; then
-  . /usr/libexec/bigtop-detect-javahome
+. /usr/libexec/bigtop-detect-javahome
 elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
-  . /usr/lib/bigtop-utils/bigtop-detect-javahome
+. /usr/lib/bigtop-utils/bigtop-detect-javahome
 fi
 
-. /etc/default/hadoop
+export HADOOP_HOME=\${HADOOP_HOME:-${HADOOP_DIR#${PREFIX}}}
 
-if [ -f /etc/default/hadoop-fuse ] 
-  then . /etc/default/hadoop-fuse
+if [ -f /etc/default/hadoop-fuse ]
+then . /etc/default/hadoop-fuse
 fi
 
-if [ -f \$HADOOP_HOME/bin/hadoop-config.sh ] 
-  then . \$HADOOP_HOME/bin/hadoop-config.sh
-fi
+export HADOOP_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR#${PREFIX}}
 
 if [ "\${LD_LIBRARY_PATH}" = "" ]; then
   export LD_LIBRARY_PATH=/usr/lib
-  for f in \`find \${JAVA_HOME}/jre/lib -name client -prune -o -name libjvm.so -exec dirname {} \;\`; do
+  for f in \`find \${JAVA_HOME} -name client -prune -o -name libjvm.so -exec dirname {} \;\`; do
     export LD_LIBRARY_PATH=\$f:\${LD_LIBRARY_PATH}
   done
 fi
 
-for i in \${HADOOP_HOME}/*.jar \${HADOOP_HOME}/lib/*.jar
-  do CLASSPATH+=\$i:
+# Pull in all jars from the hadoop client package
+for jar in \${HADOOP_HOME}/client/*.jar; do
+  CLASSPATH+="\$jar:"
 done
 
-export PATH=\$PATH:\${HADOOP_HOME}/bin/
-
-env CLASSPATH=\$CLASSPATH \${HADOOP_HOME}/bin/fuse_dfs \$@
+env CLASSPATH="\${CLASSPATH}" \${HADOOP_HOME}/bin/fuse_dfs \$@
 EOF
 
-    chmod 755 $fuse_wrapper
-  fi
+chmod 755 $fuse_wrapper
 
-  # Security related binaries
-  mkdir -p $LIB_DIR/sbin/${NATIVE_BUILD_STRING}
-  # FIXME: workaround for BIGTOP-139
-  cp -f $LIB_DIR/bin/task-controller $LIB_DIR/sbin/${NATIVE_BUILD_STRING}
-  cp -f $LIB_DIR/libexec/jsvc* $LIB_DIR/sbin/${NATIVE_BUILD_STRING}/jsvc
-
-  # Native compression libs
-  mkdir -p $LIB_DIR/lib/native/
-  cp -r ${BUILD_DIR}/lib/native/${NATIVE_BUILD_STRING} $LIB_DIR/lib/native/
-
-  # Pipes
-  mkdir -p $PREFIX/$SYSTEM_LIB_DIR $PREFIX/usr/include
-  cp ${BUILD_DIR}/c++/${NATIVE_BUILD_STRING}/lib/libhadooppipes.a \
-      ${BUILD_DIR}/c++/${NATIVE_BUILD_STRING}/lib/libhadooputils.a \
-      $PREFIX/$SYSTEM_LIB_DIR
-  cp -r ${BUILD_DIR}/c++/${NATIVE_BUILD_STRING}/include/hadoop $PREFIX/usr/include/
-
-  # libhdfs
-  cp ${BUILD_DIR}/c++/${NATIVE_BUILD_STRING}/lib/libhdfs.so.0.0.0 $PREFIX/$SYSTEM_LIB_DIR
-  ln -sf libhdfs.so.0.0.0 $PREFIX/$SYSTEM_LIB_DIR/libhdfs.so.0
-
-  # libhdfs-devel - hadoop doesn't realy install these things in nice places :(
-  mkdir -p $PREFIX/usr/share/doc/libhdfs-devel/examples
-
-  cp ${BUILD_SRC_DIR}/c++/libhdfs/hdfs.h $PREFIX/usr/include/
-  cp ${BUILD_SRC_DIR}/c++/libhdfs/hdfs_*.c $PREFIX/usr/share/doc/libhdfs-devel/examples
-
-  #    This is somewhat unintuitive, but the -devel package has this symlink (see Debian Library Packaging Guide)
-  ln -sf libhdfs.so.0.0.0 $PREFIX/$SYSTEM_LIB_DIR/libhdfs.so
-  sed -e "s|^libdir='.*'|libdir=\"$SYSTEM_LIB_DIR\"|" \
-      ./build/c++/${NATIVE_BUILD_STRING}/lib/libhdfs.la > $PREFIX/$SYSTEM_LIB_DIR/libhdfs.la
-fi
 
-# XXX Hack to get hadoop to get packaged
-find $PREFIX -name "*.debug" | xargs rm -fv
+# conf
+install -d -m 0755 $HADOOP_ETC_DIR/conf.empty
+
+cp ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/conf.empty
+
+# docs
+install -d -m 0755 ${DOC_DIR}
+cp -r ${BUILD_DIR}/share/doc/* ${DOC_DIR}/
+
+# man pages
+mkdir -p $MAN_DIR/man1
+gzip -c < $DISTRO_DIR/hadoop.1 > $MAN_DIR/man1/hadoop.1.gz
+chmod 644 $MAN_DIR/man1/hadoop.1.gz
+
+# HTTPFS
+install -d -m 0755 ${HTTPFS_DIR}/sbin
+cp ${BUILD_DIR}/sbin/httpfs.sh ${HTTPFS_DIR}/sbin/
+cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/webapps ${HTTPFS_DIR}/
+cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/conf ${HTTPFS_DIR}/
+chmod 644 ${HTTPFS_DIR}/conf/*
+install -d -m 0755 $HTTPFS_ETC_DIR/conf.empty
+mv $HADOOP_ETC_DIR/conf.empty/httpfs* $HTTPFS_ETC_DIR/conf.empty
+
+# Make the pseudo-distributed config
+for conf in conf.pseudo ; do
+  install -d -m 0755 $HADOOP_ETC_DIR/$conf
+  # Overlay the -site files
+  (cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $HADOOP_ETC_DIR/$conf && tar -xf -)
+  chmod -R 0644 $HADOOP_ETC_DIR/$conf/*
+  # When building straight out of svn we have to account for pesky .svn subdirs 
+  rm -rf `find $HADOOP_ETC_DIR/$conf -name .svn -type d` 
+done
+cp ${BUILD_DIR}/etc/hadoop/log4j.properties $HADOOP_ETC_DIR/conf.pseudo
+
+# FIXME: Provide a convenience link for configuration (HADOOP-7939)
+install -d -m 0755 ${HADOOP_DIR}/etc
+ln -s ${HADOOP_ETC_DIR##${PREFIX}}/conf ${HADOOP_DIR}/etc/hadoop
+
+# Create log, var and lib
+install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-hdfs
+install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-yarn
+install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-mapreduce
+
+# Remove all source and test jars and create version-less symlinks to offer an integration point with other projects
+for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ${HTTPFS_DIR} ; do
+  (cd $DIR &&
+   rm -fv *-test.jar *-sources.jar
+   rm -fv lib/hadoop-*.jar
+   for j in hadoop-*.jar; do
+     if [[ $j =~ hadoop-(.*)-${HADOOP_VERSION}.jar ]]; then
+       name=${BASH_REMATCH[1]}
+       ln -s $j hadoop-$name.jar
+     fi
+   done)
+done
+
+# Now create a client installation area full of symlinks
+install -d -m 0755 ${CLIENT_DIR}
+for file in `cat ${BUILD_DIR}/hadoop-client.list` ; do
+  for dir in ${HADOOP_DIR}/{lib,} ${HDFS_DIR}/{lib,} ${YARN_DIR}/{lib,} ${MAPREDUCE_DIR}/{lib,} ; do
+    [ -e $dir/$file ] && ln -fs ${dir#$PREFIX}/$file ${CLIENT_DIR}/$file && continue 2
+  done
+  exit 1
+done
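
A package build would now drive this installer with the split component layout; only --prefix and --build-dir are checked as mandatory (--distro-dir is needed in practice for the overlays), and the per-component directories (--hadoop-dir, --hdfs-dir, --yarn-dir, ...) fall back to the defaults above. Hypothetical invocation, rpm-flavored:

    $ bash install_hadoop.sh \
        --prefix=$RPM_BUILD_ROOT \
        --build-dir=build/hadoop-0.23.1 \
        --distro-dir=$RPM_SOURCE_DIR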

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.conf (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.conf?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.conf&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.conf Mon Apr 16 16:10:22 2012
@@ -13,5 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-mapred - nofile 32768
-mapred - nproc  65536
+mapred    - nofile 32768
+mapred    - nproc  65536
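
These are standard limits.conf entries of the form '<domain> <type> <item> <value>', so the file raises the open-file and process caps for the mapred account (the '-' type sets both the soft and hard limit). A quick sanity check once the file is installed (illustrative):

    $ su - mapred -c 'ulimit -n; ulimit -u'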

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.default (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.default?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.default&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapreduce.default Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -14,7 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-set -ex
-
-mvn clean -DskipTests -Dhbase.version=${HBASE_VERSION} -Dhadoop.version=0.20.205.0 install assembly:assembly "$@"
-tar --strip-components=1 -xzf target/hbase*.tar.gz
+# FIXME: MAPREDUCE-3900
+export YARN_IDENT_STRING=mapred
+export YARN_PID_DIR=/var/run/hadoop-mapreduce
+export YARN_LOG_DIR=/var/log/hadoop-mapreduce
+export HADOOP_LOG_DIR=/var/log/hadoop-mapreduce

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.conf (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.conf?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.conf&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/mapred.conf (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.conf Mon Apr 16 16:10:22 2012
@@ -13,5 +13,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-mapred - nofile 32768
-mapred - nproc  65536
+yarn   - nofile 32768
+yarn   - nproc  65536

Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.default (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/mahout/do-component-build)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.default?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.default&p1=incubator/bigtop/trunk/bigtop-packages/src/common/mahout/do-component-build&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/mahout/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hadoop/yarn.default Mon Apr 16 16:10:22 2012
@@ -1,4 +1,3 @@
-#!/bin/sh
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
 # this work for additional information regarding copyright ownership.
@@ -13,8 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-set -ex
-
-mvn clean install -Dmahout.skip.distribution=false -DskipTests "$@"
-
+export YARN_IDENT_STRING=yarn
+export YARN_PID_DIR=/var/run/hadoop-yarn
+export YARN_LOG_DIR=/var/log/hadoop-yarn
+export YARN_CONF_DIR=/etc/hadoop/conf

Added: incubator/bigtop/trunk/bigtop-packages/src/common/hbase/HBASE-5212.patch
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hbase/HBASE-5212.patch?rev=1326670&view=auto
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hbase/HBASE-5212.patch (added)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hbase/HBASE-5212.patch Mon Apr 16 16:10:22 2012
@@ -0,0 +1,73 @@
+--- pom.xml	2012-01-16 07:03:35.000000000 -0800
++++ pom.xml	2012-02-17 09:46:32.506195389 -0800
+@@ -1788,6 +1788,13 @@
+           <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+           <version>${hadoop.version}</version>
+           <optional>true</optional>
++          <scope>test</scope>
++        </dependency>
++        <dependency>
++          <groupId>org.apache.hadoop</groupId>
++          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
++          <version>${hadoop.version}</version>
++          <optional>true</optional>
+           <type>test-jar</type>
+           <scope>test</scope>
+         </dependency>
+--- src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java	2012-01-16 07:03:40.000000000 -0800
++++ src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java	2012-02-17 09:46:32.466194915 -0800
+@@ -423,7 +423,7 @@
+     if (os != null) {
+       try {
+         m = os.getWrappedStream().getClass().
+-          getMethod("getNumCurrentReplicas", new Class<?> []{});
++          getDeclaredMethod("getNumCurrentReplicas", new Class<?> []{});
+         m.setAccessible(true);
+       } catch (NoSuchMethodException e) {
+         // Thrown if getNumCurrentReplicas() function isn't available
+@@ -438,7 +438,7 @@
+       LOG.info("Using getNumCurrentReplicas--HDFS-826");
+     } else {
+       LOG.info("getNumCurrentReplicas--HDFS-826 not available; hdfs_out=" +
+-        os + ", exception=" + exception.getMessage());
++        os, exception);
+     }
+     return m;
+   }
+--- src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java	2012-01-16 07:03:36.000000000 -0800
++++ src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java	2012-02-17 09:46:32.446194449 -0800
+@@ -1164,6 +1164,8 @@
+     LOG.info("Mini mapreduce cluster started");
+     c.set("mapred.job.tracker",
+         mrCluster.createJobConf().get("mapred.job.tracker"));
++    /* this for mrv2 support */
++    conf.set("mapreduce.framework.name", "yarn");
+   }
+ 
+   /**
+--- src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java	2012-01-16 07:03:37.000000000 -0800
++++ src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestLogRolling.java	2012-02-17 09:46:32.436195465 -0800
+@@ -335,13 +335,18 @@
+     // We start 3 servers and then stop 2 to avoid a directory naming conflict
+     //  when we stop/start a namenode later, as mentioned in HBASE-5163
+     List<DataNode> existingNodes = dfsCluster.getDataNodes();
+-    dfsCluster
+-        .startDataNodes(TEST_UTIL.getConfiguration(), 3, true, null, null);
+-    for (DataNode dn: existingNodes){
+-      dfsCluster.stopDataNode( dn.dnRegistration.getName() );
++    int numDataNodes = 3;
++    dfsCluster.startDataNodes(TEST_UTIL.getConfiguration(), numDataNodes, true,
++        null, null);
++    List<DataNode> allNodes = dfsCluster.getDataNodes();
++    for (int i = allNodes.size()-1; i >= 0; i--) {
++      if (existingNodes.contains(allNodes.get(i))) {
++        dfsCluster.stopDataNode( i );
++      }
+     }
+ 
+-    assertTrue(
++    assertTrue("DataNodes " + dfsCluster.getDataNodes().size() +
++        " default replication " + fs.getDefaultReplication(),
+       dfsCluster.getDataNodes().size() >= fs.getDefaultReplication() + 1);
+ 
+     writeData(table, 2);

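The backport above does four things: the pom now pulls in hadoop-mapreduce-client-jobclient at test scope both as a plain jar and as a test-jar; HLog's HDFS-826 probe switches from getMethod() to getDeclaredMethod() so the subsequent setAccessible(true) can also reach a non-public getNumCurrentReplicas(); the mini-MR test cluster is pointed at YARN via mapreduce.framework.name; and TestLogRolling stops data nodes by index instead of through the no-longer-usable dnRegistration field. Since the diff headers carry bare paths (no a/ or b/ prefixes), the patch applies with -p0 from the HBase source root. A dry-run check, as a sketch -- the Bigtop build machinery normally applies this patch itself, and the directory name is assumed:

  cd hbase-src                               # HBase source root (name assumed)
  patch -p0 --dry-run < HBASE-5212.patch     # confirm every hunk applies
  patch -p0 < HBASE-5212.patch               # then apply for real
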
Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hbase/do-component-build Mon Apr 16 16:10:22 2012
@@ -16,5 +16,5 @@
 
 set -ex
 
-mvn clean -DskipTests -Dhbase.version=${HBASE_VERSION} -Dhadoop.version=0.20.205.0 install assembly:assembly "$@"
-tar --strip-components=1 -xzf target/hbase*.tar.gz
+mvn clean -DskipTests -Dhadoop.profile=23 -Dhadoop.version=0.23.1 install site assembly:assembly "$@"
+tar --strip-components=2 -xzf target/hbase*.tar.gz

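Two coupled changes here: the mvn line moves the build onto the Hadoop 0.23 profile (adding the site goal ahead of assembly:assembly), and the unpack now strips two leading path components instead of one, presumably because the 0.23-profile assembly nests the distribution one directory deeper. When the strip level is in doubt, it can be read straight off the tarball (sketch):

  tar -tzf target/hbase*.tar.gz | head -3    # count the leading path components
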
Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hbase/install_hbase.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hbase/install_hbase.sh?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hbase/install_hbase.sh (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hbase/install_hbase.sh Mon Apr 16 16:10:22 2012
@@ -110,14 +110,19 @@ install -d -m 0755 $PREFIX/$BIN_DIR
 install -d -m 0755 $PREFIX/$ETC_DIR
 install -d -m 0755 $PREFIX/$MAN_DIR
 
-cp -ra lib/* ${PREFIX}/${LIB_DIR}/lib/
-cp hbase*.jar $PREFIX/$LIB_DIR
-cp -a docs/* $PREFIX/$DOC_DIR
-cp *.txt $PREFIX/$DOC_DIR/
-cp -a hbase-webapps $PREFIX/$LIB_DIR
-
-cp -a conf $PREFIX/$CONF_DIR
-cp -a bin/* $PREFIX/$BIN_DIR
+cp -ra $BUILD_DIR/lib/* ${PREFIX}/${LIB_DIR}/lib/
+cp $BUILD_DIR/hbase*.jar $PREFIX/$LIB_DIR
+cp -a $BUILD_DIR/docs/* $PREFIX/$DOC_DIR
+cp $BUILD_DIR/*.txt $PREFIX/$DOC_DIR/
+cp -a $BUILD_DIR/hbase-webapps $PREFIX/$LIB_DIR
+
+cp -a $BUILD_DIR/conf $PREFIX/$CONF_DIR
+cp -a $BUILD_DIR/bin/* $PREFIX/$BIN_DIR
+# Purge scripts that don't work with packages
+for file in rolling-restart.sh graceful_stop.sh local-regionservers.sh \
+            start-hbase.sh stop-hbase.sh local-master-backup.sh ; do
+  rm -f $PREFIX/$BIN_DIR/$file
+done
 
 ln -s $ETC_DIR/conf $PREFIX/$LIB_DIR/conf
 

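Anchoring every copy at $BUILD_DIR means the script no longer has to run from inside the exploded tarball, and the new loop purges the whole-cluster control scripts (start-hbase.sh, rolling-restart.sh, and friends), which assume an ssh-driven tarball deployment rather than per-node service packages. A minimal invocation sketch; the flag names are assumptions inferred from the variables used above, not read out of the script's option parser:

  # Flag names assumed; adjust to the script's actual option parser.
  sh install_hbase.sh --build-dir=build/hbase-dist --prefix=/tmp/hbase-root
  # None of the purged scripts should appear anywhere under /tmp/hbase-root.
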
Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hive/do-component-build Mon Apr 16 16:10:22 2012
@@ -15,5 +15,4 @@
 # limitations under the License.
 
 set -ex
-
-ant -f src/build.xml -Dhadoop.mirror=http://archive.cloudera.com/hive-deps package "$@"
+ant -f src/build.xml -Dhadoop.version=0.23.0 -Dhadoop.security.version=0.23.0 -Dhadoop.security.version.prefix=0.23 package "$@"

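Since "$@" is still forwarded to ant, the versions pinned here can be overridden per build without editing the script; for example (sketch, property names taken from the line above):

  ./do-component-build -Dhadoop.version=0.23.1 -Dhadoop.security.version=0.23.1
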
Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh Mon Apr 16 16:10:22 2012
@@ -65,6 +65,7 @@ HIVE_USER="hive"
 HIVE_HOME="`eval echo ~$HIVE_USER`"
 NICENESS="+0"
 TIMEOUT=3
+USER="hive"
 
 [ -f $SYS_FILE ] && . $SYS_FILE
 
@@ -77,8 +78,14 @@ hive_start() {
       exec_env="HADOOP_OPTS=\"-Dhive.log.dir=`dirname $LOG_FILE`\""
     fi
 
+    if [ -x /sbin/runuser ]; then
+      SU="runuser -s /bin/bash $USER"
+    else
+      SU="su -s /bin/sh $USER"
+    fi
+
     log_success_msg "Starting $desc (${NAME}): "
-    start_daemon -u $HIVE_USER -p $PID_FILE -n $NICENESS  /bin/sh -c "cd $HIVE_HOME ; $exec_env nohup \
+    $SU -c "cd $HIVE_HOME ; $exec_env nohup \
            $EXE_FILE --service $service_name $PORT \
              > $LOG_FILE 2>&1 < /dev/null & "'echo $! '"> $PID_FILE"
 

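The runuser/su selection replaces the earlier start_daemon call: on platforms that ship /sbin/runuser (the RHEL family), runuser is the intended tool for switching from root to a service account, while other platforms fall back to plain su. The idiom in isolation, as a generic sketch not tied to Hive:

  TARGET_USER=hive
  if [ -x /sbin/runuser ]; then
    SU="runuser -s /bin/bash $TARGET_USER"
  else
    SU="su -s /bin/sh $TARGET_USER"
  fi
  $SU -c 'id -un'    # prints the service user's name; the command runs as hive
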
Copied: incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh.suse (from r1325587, incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh)
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh.suse?p2=incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh.suse&p1=incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh&r1=1325587&r2=1326670&rev=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh.suse Mon Apr 16 16:10:22 2012
@@ -14,15 +14,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-#
-#
+
 # Starts a Hive @HIVE_DAEMON@
 #
 # chkconfig: 345 90 10
 # description: Starts a Hive @HIVE_DAEMON@
 # processname: hive
-# pidfile: /var/run/hive/hive-@HIVE_DAEMON@.pid
+#
 ### BEGIN INIT INFO
 # Provides:          hive-@HIVE_DAEMON@
 # Required-Start:    $syslog $remote_fs
@@ -31,10 +29,10 @@
 # Should-Stop:
 # Default-Start:     3 4 5
 # Default-Stop:      0 1 2 6
-# Short-Description: Starts a Hive @HIVE_DAEMON@
+# Short-Description: Hive @HIVE_DAEMON@
 ### END INIT INFO
 
-source /lib/lsb/init-functions
+. /lib/lsb/init-functions
 
 # Autodetect JAVA_HOME if not defined
 if [ -e /usr/libexec/bigtop-detect-javahome ]; then
@@ -43,92 +41,158 @@ elif [ -e /usr/lib/bigtop-utils/bigtop-d
   . /usr/lib/bigtop-utils/bigtop-detect-javahome
 fi
 
-RETVAL_SUCCESS=0
-
-STATUS_RUNNING=0
-STATUS_DEAD=1
-STATUS_DEAD_AND_LOCK=2
-STATUS_NOT_RUNNING=3
-
-ERROR_PROGRAM_NOT_INSTALLED=5
-ERROR_PROGRAM_NOT_CONFIGURED=6
+# Modelled after $HADOOP_HOME/bin/hadoop-daemon.sh
 
 PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
 NAME="hive-@HIVE_DAEMON@"
-DESC="Hive @HIVE_DAEMON@ daemon"
 SYS_FILE="/etc/default/${NAME}"
 EXE_FILE="/usr/lib/hive/bin/hive"
 PID_FILE="/var/run/hive/${NAME}.pid"
 LOCKFILE="/var/lock/subsys/${NAME}"
 LOG_FILE="/var/log/hive/${NAME}.log"
 HIVE_USER="hive"
-HIVE_HOME="`eval echo ~$HIVE_USER`"
-NICENESS="+0"
-TIMEOUT=3
+NICENESS="0"
+DESC="Hive daemon"
+
+DODTIME=3
+SLAVE_TIMEOUT=300
 
 [ -f $SYS_FILE ] && . $SYS_FILE
 
+hive_die() {
+    echo "$@"
+    exit 1
+}
+hive_is_process_alive() {
+    local pid="$1"
+    kill -0 $pid > /dev/null 2>&1
+}
+hive_check_pidfile() {
+    local pidfile="$1" # IN
+    local pid
+
+    pid=`cat "$pidfile" 2>/dev/null`
+    if [ "$pid" = '' ]; then
+    # The file probably does not exist or is empty. 
+	return 1
+    fi
+    
+    set -- $pid
+    pid="$1"
+
+    hive_is_process_alive $pid
+}
+hive_process_kill() {
+    local pid="$1"    # IN
+    local signal="$2" # IN
+    local second
+
+    kill -$signal $pid 2>/dev/null
+
+    for second in 0 1 2 3 4 5 6 7 8 9 10; do
+      hive_is_process_alive "$pid" || return 0
+      sleep 1
+    done
+
+    return 1
+}
+hive_stop_pidfile() {
+    local pidfile="$1" # IN
+    local pid
+
+    pid=`cat "$pidfile" 2>/dev/null`
+    if [ "$pid" = '' ]; then
+      # The file probably does not exist or is empty. Success
+	return 0
+    fi
+    
+    set -- $pid
+    pid="$1"
+
+    # First try the easy way
+    if hive_process_kill "$pid" 15; then
+	return 0
+    fi
+
+    # Otherwise try the hard way
+    if hive_process_kill "$pid" 9; then
+	return 0
+    fi
+
+    return 1
+}
+
 hive_start() {
+    ERROR_PROGRAM_NOT_INSTALLED=5
+
     [ -x $EXE_FILE ] || exit $ERROR_PROGRAM_NOT_INSTALLED
+    log_success_msg "Starting $desc (${NAME}): "
+
+    if hive_check_pidfile $PID_FILE ; then
+      exit 0
+    fi
 
     service_name="@HIVE_DAEMON@"
     if [ $service_name = "server" ] ; then
       service_name="hiveserver"
       exec_env="HADOOP_OPTS=\"-Dhive.log.dir=`dirname $LOG_FILE`\""
     fi
-
-    log_success_msg "Starting $desc (${NAME}): "
-    start_daemon -u $HIVE_USER -p $PID_FILE -n $NICENESS  /bin/sh -c "cd $HIVE_HOME ; $exec_env nohup \
+    su -s /bin/sh $HIVE_USER \
+       -c "$exec_env nohup nice -n $NICENESS       \
            $EXE_FILE --service $service_name $PORT \
              > $LOG_FILE 2>&1 < /dev/null & "'echo $! '"> $PID_FILE"
+    sleep 3
 
+    hive_check_pidfile $PID_FILE
     RETVAL=$?
-    [ $RETVAL -eq $RETVAL_SUCCESS ] && touch $LOCKFILE
+    echo
+    [ $RETVAL -eq 0 ] && touch $LOCKFILE
     return $RETVAL
 }
-
 hive_stop() {
     log_success_msg "Stopping $desc (${NAME}): "
-    killproc -p $PID_FILE java
-    RETVAL=$?
-
-    [ $RETVAL -eq $RETVAL_SUCCESS ] && rm -f $LOCKFILE $PID_FILE
-    return $RETVAL
+    if [ -f $PID_FILE ]; then
+      hive_stop_pidfile $PID_FILE
+      RETVAL=$?
+      echo
+      [ $RETVAL -eq 0 ] && rm -f $LOCKFILE $PID_FILE
+    fi
 }
-
 hive_restart() {
     hive_stop
-    [ -n "$TIMEOUT" ] && sleep $TIMEOUT
+    [ -n "$DODTIME" ] && sleep $DODTIME
     hive_start
 }
-
 hive_status() {
-    echo -n "Checking for service $desc: "
+    local STATUS_RUNNING=0
+    local STATUS_DEAD=1
+    local STATUS_DEAD_AND_LOCK=2
+    local STATUS_NOT_RUNNING=3
+    local DAEMON="${NAME}"
+
     pidofproc -p $PID_FILE java > /dev/null
     status=$?
 
     case "$status" in
       $STATUS_RUNNING)
-        log_success_msg "@HIVE_DAEMON@ is running"
+        log_success_msg "$DAEMON is running"
         ;;
       $STATUS_DEAD)
-        log_failure_msg "@HIVE_DAEMON@ is dead and pid file exists"
+        log_failure_msg "$DAEMON is dead and pid file exists"
         ;;
       $STATUS_DEAD_AND_LOCK)
-        log_failure_msg "@HIVE_DAEMON@ is dead and lock file exists"
+        log_failure_msg "$DAEMON is dead and lock file exists"
         ;;
       $STATUS_NOT_RUNNING)
-        log_failure_msg "@HIVE_DAEMON@ is not running"
+        log_failure_msg "$DAEMON is not running"
         ;;
       *)
-        log_failure_msg "@HIVE_DAEMON@ status is unknown"
+        log_failure_msg "$DAEMON status is unknown"
         ;;
     esac
     return $status
 }
 
-RETVAL=0
-
 case "$1" in
     start)
       hive_start

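The SUSE variant inlines its own pidfile handling (hive_check_pidfile, hive_stop_pidfile) rather than leaning on start_daemon/killproc, presumably because those helpers are not available in the same form in SUSE's /lib/lsb/init-functions; hive_process_kill implements the usual TERM-then-KILL escalation with roughly a ten-second grace period. Once the package substitutes @HIVE_DAEMON@ (say, with "server"), the script is driven like any other init script (sketch):

  /etc/init.d/hive-server start    # a second start is a no-op: the pidfile check exits 0
  /etc/init.d/hive-server status   # maps pidofproc's exit code onto an LSB-style message
  /etc/init.d/hive-server stop     # SIGTERM first, SIGKILL if still alive after ~10s
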
Propchange: incubator/bigtop/trunk/bigtop-packages/src/common/hive/hive.sh.suse
------------------------------------------------------------------------------
    svn:eol-style = native

Modified: incubator/bigtop/trunk/bigtop-packages/src/common/hive/install_hive.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/trunk/bigtop-packages/src/common/hive/install_hive.sh?rev=1326670&r1=1326669&r2=1326670&view=diff
==============================================================================
--- incubator/bigtop/trunk/bigtop-packages/src/common/hive/install_hive.sh (original)
+++ incubator/bigtop/trunk/bigtop-packages/src/common/hive/install_hive.sh Mon Apr 16 16:10:22 2012
@@ -119,7 +119,6 @@ do
   wrapper=$BIN_DIR/$file
   cat >>$wrapper <<EOF
 #!/bin/sh
-. /etc/default/hadoop
 
 # Autodetect JAVA_HOME if not defined
 if [ -e /usr/libexec/bigtop-detect-javahome ]; then
@@ -128,6 +127,9 @@ elif [ -e /usr/lib/bigtop-utils/bigtop-d
   . /usr/lib/bigtop-utils/bigtop-detect-javahome
 fi
 
+#FIXME: workaround for HIVE-2757 (NOTE: HADOOP_HOME is obsolete as of Hadoop 0.23)
+export HADOOP_HOME=\${HADOOP_HOME:-/usr/lib/hadoop}
+
 export HIVE_HOME=$INSTALLED_LIB_DIR
 exec $INSTALLED_LIB_DIR/bin/$file "\$@"
 EOF
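
With the shell escapes resolved, the generated wrapper for, say, the hive command comes out roughly as follows, assuming INSTALLED_LIB_DIR is /usr/lib/hive (the JAVA_HOME block is emitted by the same heredoc just above):

  #!/bin/sh

  # Autodetect JAVA_HOME if not defined
  if [ -e /usr/libexec/bigtop-detect-javahome ]; then
    . /usr/libexec/bigtop-detect-javahome
  elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
    . /usr/lib/bigtop-utils/bigtop-detect-javahome
  fi

  #FIXME: workaround for HIVE-2757 (NOTE: HADOOP_HOME is obsolete as of Hadoop 0.23)
  export HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}

  export HIVE_HOME=/usr/lib/hive
  exec /usr/lib/hive/bin/hive "$@"

The ${HADOOP_HOME:-...} default respects a caller-supplied HADOOP_HOME and only falls back to /usr/lib/hadoop when it is unset, which is what keeps this usable as a transitional workaround.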