Posted to commits@bigtop.apache.org by bm...@apache.org on 2011/11/01 23:06:15 UTC

svn commit: r1196317 - in /incubator/bigtop/branches/hadoop-0.23: bigtop-packages/src/common/hadoop/do-component-build bigtop-packages/src/common/hadoop/install_hadoop.sh bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec bigtop.mk

Author: bmahe
Date: Tue Nov  1 22:06:15 2011
New Revision: 1196317

URL: http://svn.apache.org/viewvc?rev=1196317&view=rev
Log:
Update Hadoop 23 branch

Modified:
    incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
    incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
    incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
    incubator/bigtop/branches/hadoop-0.23/bigtop.mk

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build?rev=1196317&r1=1196316&r2=1196317&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/do-component-build Tue Nov  1 22:06:15 2011
@@ -17,38 +17,25 @@
 set -ex
 
 # FIXME: there has to be a better way
-(wget http://protobuf.googlecode.com/files/protobuf-2.4.1.tar.bz2
-tar xjvf protobuf-2.4.1.tar.bz2
-cd protobuf-2.4.1
-./configure --prefix=`pwd`/root
-make install)
+#(wget http://protobuf.googlecode.com/files/protobuf-2.4.1.tar.bz2
+#tar xjvf protobuf-2.4.1.tar.bz2
+#cd protobuf-2.4.1
+#./configure --prefix=`pwd`/root
+#make install)
 
-PATH=`pwd`/protobuf-2.4.1/root/bin:$PATH
-export PATH
+#PATH=`pwd`/protobuf-2.4.1/root/bin:$PATH
+#export PATH
 
-STITCH_FILES="CHANGES.txt LICENSE.txt NOTICE.txt README.txt"
 
 mkdir build
-rm -f $STITCH_FILES
+mkdir build/src
+
+MAVEN_SKIP_TESTS="-DskipTests -DskipTest -DskipITs"
+mvn -Pdist -Pnative -Psrc -Pdocs -Dtar ${MAVEN_SKIP_TESTS} package  "$@"
+mvn install ${MAVEN_SKIP_TESTS} $@
+mvn site site:stage ${MAVEN_SKIP_TESTS} $@
+
+(cd build ; tar --strip-components=1 -xzvf  ../hadoop-dist/target/hadoop-0.23.0-SNAPSHOT.tar.gz)
+(cd build/src ; tar --strip-components=1 -xzvf  ../../hadoop-dist/target/hadoop-dist-0.23.0-SNAPSHOT-src.tar.gz)
+
 
-mvn -Pdist -Pnative -Dtar -DskipTests -DskipITs package  "$@"
-# FIXME: MR should really be part of the above
-(cd hadoop-mapreduce-project ; mvn -Dtar -DskipTests -DskipITs package assembly:single "$@")
-
-(cd build ; tar --strip-components=1 -xzvf  ../hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-0.23.0-SNAPSHOT.tar.gz)
-(cd build ; tar --strip-components=1 -xzvf  ../hadoop-common-project/hadoop-common/target/hadoop-common-0.23.0-SNAPSHOT.tar.gz)
-(cd build ; tar --strip-components=1 -xzvf  ../hadoop-mapreduce-project/target/hadoop-mapreduce-0.23.0-SNAPSHOT-all.tar.gz)
-
-# FIXME: This could be missing MR 
-for file in $STITCH_FILES ; do
-  cat hadoop*project/hadoop*/$file >> $file
-done
-
-# A bunch of workarounds
-cp build/conf/* build/etc/hadoop
-mkdir -p build/contrib/fuse-dfs
-cp build/lib/libhdfs.a build/lib/libhadooppipes.a
-cp build/lib/libhdfs.a build/lib/libhadooputils.a
-rm build/libexec/jsvc
-cp /bin/true build/libexec/jsvc
-cp build/sbin/hadoop-daemon.sh build/bin/hadoop-daemon.sh
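
For readers skimming the hunk above, the net effect is that the in-tree protobuf 2.4.1 build is dropped (protoc is now expected to be provided by the build host) and the per-subproject common/hdfs/mapreduce tarballs are replaced by the consolidated hadoop-dist artifacts. A minimal sketch of the resulting flow, reconstructed from this diff only (extra arguments keep being forwarded to each mvn call via "$@"):

    set -ex
    # protoc must already be on $PATH; the bundled protobuf build is now commented out
    MAVEN_SKIP_TESTS="-DskipTests -DskipTest -DskipITs"
    mkdir -p build/src
    mvn -Pdist -Pnative -Psrc -Pdocs -Dtar ${MAVEN_SKIP_TESTS} package "$@"
    mvn install ${MAVEN_SKIP_TESTS} "$@"
    mvn site site:stage ${MAVEN_SKIP_TESTS} "$@"
    # Unpack the consolidated binary and source tarballs produced by hadoop-dist
    (cd build     && tar --strip-components=1 -xzvf ../hadoop-dist/target/hadoop-0.23.0-SNAPSHOT.tar.gz)
    (cd build/src && tar --strip-components=1 -xzvf ../../hadoop-dist/target/hadoop-dist-0.23.0-SNAPSHOT-src.tar.gz)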

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh?rev=1196317&r1=1196316&r2=1196317&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/common/hadoop/install_hadoop.sh Tue Nov  1 22:06:15 2011
@@ -62,8 +62,8 @@ while true ; do
         --distro-dir)
         DISTRO_DIR=$2 ; shift 2
         ;;
-        --lib-dir)
-        LIB_DIR=$2 ; shift 2
+        --hadoop-dir)
+        HADOOP_DIR=$2 ; shift 2
         ;;
         --system-lib-dir)
         SYSTEM_LIB_DIR=$2 ; shift 2
@@ -77,8 +77,11 @@ while true ; do
         --doc-dir)
         DOC_DIR=$2 ; shift 2
         ;;
-        --etc-dir)
-        ETC_DIR=$2 ; shift 2
+        --hadoop-etc-dir)
+        HADOOP_ETC_DIR=$2 ; shift 2
+        ;;
+        --yarn-etc-dir)
+        YARN_ETC_DIR=$2 ; shift 2
         ;;
         --installed-lib-dir)
         INSTALLED_LIB_DIR=$2 ; shift 2
@@ -110,24 +113,20 @@ for var in PREFIX BUILD_DIR; do
   fi
 done
 
-LIB_DIR=${LIB_DIR:-$PREFIX/usr/lib/hadoop}
+HADOOP_DIR=${HADOOP_DIR:-$PREFIX/usr/lib/hadoop}
 SYSTEM_LIB_DIR=${SYSTEM_LIB_DIR:-/usr/lib}
 BIN_DIR=${BIN_DIR:-$PREFIX/usr/bin}
 DOC_DIR=${DOC_DIR:-$PREFIX/usr/share/doc/hadoop}
 MAN_DIR=${MAN_DIR:-$PREFIX/usr/man}
 EXAMPLE_DIR=${EXAMPLE_DIR:-$DOC_DIR/examples}
 SRC_DIR=${SRC_DIR:-$PREFIX/usr/src/hadoop}
-ETC_DIR=${ETC_DIR:-$PREFIX/etc/hadoop}
-
-INSTALLED_LIB_DIR=${INSTALLED_LIB_DIR:-/usr/lib/hadoop}
+HADOOP_ETC_DIR=${HADOOP_ETC_DIR:-$PREFIX/etc/hadoop}
+YARN_ETC_DIR=${YARN_ETC_DIR:-$PREFIX/etc/yarn}
 
-mkdir -p $LIB_DIR
-(cd $BUILD_DIR && tar -cf - .) | (cd $LIB_DIR && tar xf - )
+INSTALLED_HADOOP_DIR=${INSTALLED_HADOOP_DIR:-/usr/lib/hadoop}
 
-# Take out things we've installed elsewhere
-for x in sources conf etc share/doc lib/libhdfs* ; do
-  rm -rf $LIB_DIR/$x 
-done
+HADOOP_BIN_DIR=${HADOOP_DIR}/bin
+HADOOP_SBIN_DIR=${HADOOP_DIR}/bin
 
 # Make bin wrappers
 mkdir -p $BIN_DIR
@@ -137,139 +136,20 @@ for bin_wrapper in hadoop ; do
   cat > $wrapper <<EOF
 #!/bin/sh
 
-export HADOOP_HOME=$INSTALLED_LIB_DIR
-exec $INSTALLED_LIB_DIR/bin/$bin_wrapper "\$@"
+export HADOOP_HOME=$INSTALLED_HADOOP_DIR
+exec $INSTALLED_HADOOP_DIR/bin/$bin_wrapper "\$@"
 EOF
   chmod 755 $wrapper
 done
 
-# Link examples to /usr/share
-mkdir -p $EXAMPLE_DIR
-# FIXME
-#for x in $LIB_DIR/*examples*jar ; do
-#  INSTALL_LOC=`echo $x | sed -e "s,$LIB_DIR,$INSTALLED_LIB_DIR,"`
-#  ln -sf $INSTALL_LOC $EXAMPLE_DIR/
-#done
-# And copy the source
-mkdir -p $EXAMPLE_DIR/src
-cp -a $BUILD_DIR/sources/src/examples/* $EXAMPLE_DIR/src
-
-# Install docs
-mkdir -p $DOC_DIR
-cp -r $BUILD_DIR/share/doc/* $DOC_DIR
-
-# Install source
-mkdir -p ${SRC_DIR}
-rm -f hdfs/src/contrib/fuse-dfs/src/*.o 
-rm -f hdfs/src/contrib/fuse-dfs/src/fuse_dfs
-# rm -rf ${BUILD_SRC_DIR}/contrib/hod
-# rm -f ${SRC_DIR}/contrib/fuse-dfs/fuse_dfs
-
-
-cp -a $BUILD_DIR/sources ${SRC_DIR}/
-
-# Make the empty config
-install -d -m 0755 $ETC_DIR/conf.empty
-(cd ${BUILD_DIR}/etc/hadoop && tar cf - .) | (cd $ETC_DIR/conf.empty && tar xf -)
-# Overlay the -site files
-(cd $DISTRO_DIR/conf.empty && tar --exclude='.svn' -cf - .) | (cd $ETC_DIR/conf.empty && tar -xf -)
-
-# Link the HADOOP_HOME conf, log and pid dir to installed locations
-rm -rf $LIB_DIR/conf
-ln -s ${ETC_DIR#$PREFIX}/conf $LIB_DIR/conf
-mkdir $LIB_DIR/etc
-ln -s ${ETC_DIR#$PREFIX}/conf $LIB_DIR/etc/hadoop
-rm -rf $LIB_DIR/logs
-ln -s /var/log/hadoop $LIB_DIR/logs
-rm -rf $LIB_DIR/pids
-ln -s /var/run/hadoop $LIB_DIR/pids
-
-# Make the pseudo-distributed config
-for conf in conf.pseudo ; do
-  install -d -m 0755 $ETC_DIR/$conf
-  # Install the default configurations
-  (cd ${BUILD_DIR}/conf && tar -cf - .) | (cd $ETC_DIR/$conf && tar -xf -)
-  # Overlay the -site files
-  (cd $DISTRO_DIR/$conf && tar --exclude='.svn' -cf - .) | (cd $ETC_DIR/$conf && tar -xf -)
-done
-
-# man pages
-mkdir -p $MAN_DIR/man1
-gzip -c < $DISTRO_DIR/hadoop.1 > $MAN_DIR/man1/hadoop.1.gz
-
-############################################################
-# ARCH DEPENDENT STUFF
-############################################################
-
-if [ ! -z "$NATIVE_BUILD_STRING" ]; then
-  # Fuse 
-  mkdir -p $LIB_DIR/bin
-  if [ -d $BUILD_DIR/contrib/fuse-dfs ]; then
-    ln -s ../contrib/fuse-dfs/fuse_dfs $LIB_DIR/bin/fuse_dfs
-    gzip -c < $DISTRO_DIR/hadoop-fuse-dfs.1 > $MAN_DIR/man1/hadoop-fuse-dfs.1.gz
-
-    fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
-  cat > $fuse_wrapper << EOF
-#!/bin/bash
-
-/sbin/modprobe fuse
-
-export HADOOP_HOME=$INSTALLED_LIB_DIR
-
-if [ -f /etc/default/hadoop-fuse ] 
-  then . /etc/default/hadoop-fuse
-fi
-
-if [ -f \$HADOOP_HOME/bin/hadoop-config.sh ] 
-  then . \$HADOOP_HOME/bin/hadoop-config.sh
-fi
-
-if [ "\${LD_LIBRARY_PATH}" = "" ]; then
-  export LD_LIBRARY_PATH=/usr/lib
-  for f in \`find \${JAVA_HOME}/jre/lib -name client -prune -o -name libjvm.so -exec dirname {} \;\`; do
-    export LD_LIBRARY_PATH=\$f:\${LD_LIBRARY_PATH}
-  done
-fi
+mkdir -p ${HADOOP_BIN_DIR}
+cp -a ${BUILD_DIR}/bin/* ${HADOOP_BIN_DIR}/
 
-for i in \${HADOOP_HOME}/*.jar \${HADOOP_HOME}/lib/*.jar
-  do CLASSPATH+=\$i:
-done
-
-export PATH=\$PATH:\${HADOOP_HOME}/bin/
-
-env CLASSPATH=\$CLASSPATH \${HADOOP_HOME}/bin/fuse_dfs \$@
-EOF
-
-    chmod 755 $fuse_wrapper
-  fi
+mkdir -p ${HADOOP_SBIN_DIR}
+cp ${BUILD_DIR}/sbin/* ${HADOOP_SBIN_DIR}/
 
-  # sbin
-  mkdir -p $LIB_DIR/sbin/${NATIVE_BUILD_STRING}
-  mv $LIB_DIR/libexec/jsvc $LIB_DIR/sbin/${NATIVE_BUILD_STRING}
-
-  # Native compression libs
-  mkdir -p $LIB_DIR/lib/native/${NATIVE_BUILD_STRING}
-  cp ${BUILD_DIR}/lib/lib* $LIB_DIR/lib/native/${NATIVE_BUILD_STRING}
-
-  # Pipes
-  mkdir -p $PREFIX/$SYSTEM_LIB_DIR $PREFIX/usr/include
-  cp ${BUILD_DIR}/lib/libhadooppipes.a ${BUILD_DIR}/lib/libhadooputils.a $PREFIX/$SYSTEM_LIB_DIR
-  cp -r ${BUILD_DIR}/sources/src/c++/pipes/api/hadoop $PREFIX/usr/include/
-  cp -r ${BUILD_DIR}/sources/src/c++/utils/api/hadoop $PREFIX/usr/include/
-
-  # libhdfs
-  cp ${BUILD_DIR}/lib/libhdfs* $PREFIX/$SYSTEM_LIB_DIR
-
-  # libhdfs-devel - hadoop doesn't realy install these things in nice places :(
-  mkdir -p $PREFIX/usr/share/doc/libhdfs-devel/examples
-
-  cp hadoop-hdfs-project/hadoop-hdfs/src/main/native/hdfs.h $PREFIX/usr/include/
-  #cp hdfs/src/c++/libhdfs/hdfs_*.c $PREFIX/usr/share/doc/libhdfs-devel/examples
-
-  #    This is somewhat unintuitive, but the -devel package has this symlink (see Debian Library Packaging Guide)
-  #ln -sf libhdfs.so.0.0.0 $PREFIX/$SYSTEM_LIB_DIR/libhdfs.so
-  sed -ie "s|^libdir='.*'|libdir=\"$SYSTEM_LIB_DIR\"|" $PREFIX/$SYSTEM_LIB_DIR/libhdfs.la
-fi
+install -d -m 0755 $PREFIX/$HADOOP_ETC_DIR/conf.empty
+install -d -m 0755 $PREFIX/$YARN_ETC_DIR/conf.empty
 
-# XXX Hack to get hadoop to get packaged
-find $PREFIX -name "*.debug" | xargs rm -fv
+cp  ${BUILD_DIR}/conf/* $PREFIX/$YARN_ETC_DIR/conf.empty
+cp ${BUILD_DIR}/etc/hadoop/* $PREFIX/$YARN_ETC_DIR/conf.empty
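
The option renames above (--lib-dir to --hadoop-dir, --etc-dir to --hadoop-etc-dir, plus the new --yarn-etc-dir) have to be mirrored by callers. A hedged example invocation: the option names come from the hunks above, the directories are placeholders only, and --build-dir is an assumption based on the script's BUILD_DIR requirement rather than something visible in this diff.

    # Illustrative only; real values are supplied by the rpm/deb packaging code.
    # --build-dir is an assumed flag name (the script requires BUILD_DIR, but the
    # option itself is outside the hunks shown here).
    bash install_hadoop.sh \
      --prefix=/tmp/hadoop-pkg-root \
      --build-dir=build \
      --distro-dir=bigtop-packages/src/common/hadoop \
      --hadoop-dir=/tmp/hadoop-pkg-root/usr/lib/hadoop \
      --system-lib-dir=/usr/lib \
      --hadoop-etc-dir=/etc/hadoop \
      --yarn-etc-dir=/etc/yarn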

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec?rev=1196317&r1=1196316&r2=1196317&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec Tue Nov  1 22:06:15 2011
@@ -17,6 +17,7 @@
 #
 %define hadoop_name hadoop
 %define etc_hadoop /etc/%{name}
+%define etc_yarn /etc/yarn
 %define config_hadoop %{etc_hadoop}/conf
 %define lib_hadoop_dirname /usr/lib
 %define lib_hadoop %{lib_hadoop_dirname}/%{name}
@@ -292,7 +293,7 @@ DataNodes to bind to a low (privileged) 
 before continuing operation.
 
 %prep
-%setup -n apache-hadoop-common-e141664
+%setup -n apache-hadoop-common-ee19013
 
 %build
 # This assumes that you installed Java JDK 6 and set JAVA_HOME
@@ -318,7 +319,8 @@ bash %{SOURCE2} \
   --src-dir=$RPM_BUILD_ROOT%{src_hadoop} \
   --lib-dir=$RPM_BUILD_ROOT%{lib_hadoop} \
   --system-lib-dir=%{_libdir} \
-  --etc-dir=$RPM_BUILD_ROOT%{etc_hadoop} \
+  --hadoop-etc-dir=$RPM_BUILD_ROOT%{etc_hadoop} \
+  --yarn-etc-dir=$RPM_BUILD_ROOT%{etc_yarn} \
   --prefix=$RPM_BUILD_ROOT \
   --doc-dir=$RPM_BUILD_ROOT%{doc_hadoop} \
   --example-dir=$RPM_BUILD_ROOT%{doc_hadoop}/examples \

Modified: incubator/bigtop/branches/hadoop-0.23/bigtop.mk
URL: http://svn.apache.org/viewvc/incubator/bigtop/branches/hadoop-0.23/bigtop.mk?rev=1196317&r1=1196316&r2=1196317&view=diff
==============================================================================
--- incubator/bigtop/branches/hadoop-0.23/bigtop.mk (original)
+++ incubator/bigtop/branches/hadoop-0.23/bigtop.mk Tue Nov  1 22:06:15 2011
@@ -24,7 +24,7 @@ HADOOP_RELEASE_VERSION=1
 HADOOP_TARBALL_DST=$(HADOOP_NAME)-$(HADOOP_BASE_VERSION).tar.gz
 #HADOOP_TARBALL_SRC=$(HADOOP_TARBALL_DST)
 #HADOOP_SITE=$(APACHE_MIRROR)/hadoop/common/$(HADOOP_NAME)-$(HADOOP_BASE_VERSION)/
-HADOOP_TARBALL_SRC=c606b00
+HADOOP_TARBALL_SRC=ee19013
 HADOOP_SITE=https://github.com/apache/hadoop-common/tarball
 $(eval $(call PACKAGE,hadoop,HADOOP))
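
Note that the source pin moves from c606b00 to ee19013 and is matched by the %setup directory change in hadoop.spec above. A rough sketch of the fetch this implies, assuming the usual GitHub tarball layout (the actual download is driven by Bigtop's shared make rules, which are not part of this diff):

    # Illustrative fetch only; the real target names the file per HADOOP_TARBALL_DST,
    # i.e. $(HADOOP_NAME)-$(HADOOP_BASE_VERSION).tar.gz
    wget -O hadoop-snapshot.tar.gz \
      https://github.com/apache/hadoop-common/tarball/ee19013
    # GitHub tarballs unpack into apache-hadoop-common-<short sha>, hence the
    # "%setup -n apache-hadoop-common-ee19013" change in hadoop.spec
    tar tzf hadoop-snapshot.tar.gz | head -n 1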