You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by se...@apache.org on 2022/09/30 14:27:31 UTC

[bigtop] branch master updated: BIGTOP-3821: Support parent directory configuration for Hadoop rpm build script (#1019)

This is an automated email from the ASF dual-hosted git repository.

sekikn pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 498a9c1c BIGTOP-3821: Support parent directory configuration for Hadoop rpm build script (#1019)
498a9c1c is described below

commit 498a9c1c365dc6670e0dac06081e5cb1655ff468
Author: Zhiguo Wu <wu...@apache.org>
AuthorDate: Fri Sep 30 22:27:25 2022 +0800

    BIGTOP-3821: Support parent directory configuration for Hadoop rpm build script (#1019)
---
 .../src/common/hadoop/install_hadoop.sh            | 347 +++++++++++----------
 bigtop-packages/src/deb/hadoop/rules               |  11 +-
 bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec   | 302 +++++++++---------
 3 files changed, 349 insertions(+), 311 deletions(-)

diff --git a/bigtop-packages/src/common/hadoop/install_hadoop.sh b/bigtop-packages/src/common/hadoop/install_hadoop.sh
index ca92a295..0e253761 100755
--- a/bigtop-packages/src/common/hadoop/install_hadoop.sh
+++ b/bigtop-packages/src/common/hadoop/install_hadoop.sh
@@ -25,7 +25,22 @@ usage: $0 <options>
      --prefix=PREFIX             path to install into
 
   Optional options:
-     --native-build-string       eg Linux-amd-64 (optional - no native installed if not set)
+     --doc-dir=DIR               path to install docs into [/usr/share/doc/hadoop]
+     --bin-dir=DIR               path to install bins [/usr/bin]
+     --man-dir=DIR               path to install mans [/usr/share/man]
+     --etc-default=DIR           path to bigtop default dir [/etc/default]
+     --hadoop-dir=DIR            path to install hadoop home [/usr/lib/hadoop]
+     --hdfs-dir=DIR              path to install hdfs home [/usr/lib/hadoop-hdfs]
+     --yarn-dir=DIR              path to install yarn home [/usr/lib/hadoop-yarn]
+     --mapreduce-dir=DIR         path to install mapreduce home [/usr/lib/hadoop-mapreduce]
+     --var-hdfs=DIR              path to install hdfs contents [/var/lib/hadoop-hdfs]
+     --var-yarn=DIR              path to install yarn contents [/var/lib/hadoop-yarn]
+     --var-mapreduce=DIR         path to install mapreduce contents [/var/lib/hadoop-mapreduce]
+     --var-httpfs=DIR            path to install httpfs contents [/var/lib/hadoop-httpfs]
+     --var-kms=DIR               path to install kms contents [/var/lib/hadoop-kms]
+     --system-include-dir=DIR    path to install development headers [/usr/include]
+     --system-lib-dir=DIR        path to install native libraries [/usr/lib]
+     --etc-hadoop=DIR            path to install hadoop conf [/etc/hadoop]
      ... [ see source for more similar options ]
   "
   exit 1
@@ -34,24 +49,25 @@ usage: $0 <options>
 OPTS=$(getopt \
   -n $0 \
   -o '' \
-  -l 'prefix:' \
   -l 'distro-dir:' \
   -l 'build-dir:' \
-  -l 'native-build-string:' \
-  -l 'installed-lib-dir:' \
+  -l 'prefix:' \
+  -l 'doc-dir:' \
+  -l 'bin-dir:' \
+  -l 'man-dir:' \
+  -l 'etc-default:' \
   -l 'hadoop-dir:' \
   -l 'hdfs-dir:' \
   -l 'yarn-dir:' \
   -l 'mapreduce-dir:' \
-  -l 'client-dir:' \
+  -l 'var-hdfs:' \
+  -l 'var-yarn:' \
+  -l 'var-mapreduce:' \
+  -l 'var-httpfs:' \
+  -l 'var-kms:' \
   -l 'system-include-dir:' \
   -l 'system-lib-dir:' \
-  -l 'system-libexec-dir:' \
-  -l 'hadoop-etc-dir:' \
-  -l 'doc-dir:' \
-  -l 'man-dir:' \
-  -l 'example-dir:' \
-  -l 'apache-branch:' \
+  -l 'etc-hadoop:' \
   -- "$@")
 
 if [ $? != 0 ] ; then
@@ -61,11 +77,26 @@ fi
 eval set -- "$OPTS"
 while true ; do
     case "$1" in
+        --distro-dir)
+        DISTRO_DIR=$2 ; shift 2
+        ;;
+        --build-dir)
+        BUILD_DIR=$2 ; shift 2
+        ;;
         --prefix)
         PREFIX=$2 ; shift 2
         ;;
-        --distro-dir)
-        DISTRO_DIR=$2 ; shift 2
+        --doc-dir)
+        DOC_DIR=$2 ; shift 2
+        ;;
+        --bin-dir)
+        BIN_DIR=$2 ; shift 2
+        ;;
+        --man-dir)
+        MAN_DIR=$2 ; shift 2
+        ;;
+        --etc-default)
+        ETC_DEFAULT=$2 ; shift 2
         ;;
         --hadoop-dir)
         HADOOP_DIR=$2 ; shift 2
@@ -79,38 +110,29 @@ while true ; do
         --mapreduce-dir)
         MAPREDUCE_DIR=$2 ; shift 2
         ;;
-        --client-dir)
-        CLIENT_DIR=$2 ; shift 2
-        ;;
-        --system-include-dir)
-        SYSTEM_INCLUDE_DIR=$2 ; shift 2
-        ;;
-        --system-lib-dir)
-        SYSTEM_LIB_DIR=$2 ; shift 2
-        ;;
-        --system-libexec-dir)
-        SYSTEM_LIBEXEC_DIR=$2 ; shift 2
+        --var-hdfs)
+        VAR_HDFS=$2 ; shift 2
         ;;
-        --build-dir)
-        BUILD_DIR=$2 ; shift 2
+        --var-yarn)
+        VAR_YARN=$2 ; shift 2
         ;;
-        --native-build-string)
-        NATIVE_BUILD_STRING=$2 ; shift 2
+        --var-mapreduce)
+        VAR_MAPREDUCE=$2 ; shift 2
         ;;
-        --doc-dir)
-        DOC_DIR=$2 ; shift 2
+        --var-httpfs)
+        VAR_HTTPFS=$2 ; shift 2
         ;;
-        --hadoop-etc-dir)
-        HADOOP_ETC_DIR=$2 ; shift 2
+        --var-kms)
+        VAR_KMS=$2 ; shift 2
         ;;
-        --installed-lib-dir)
-        INSTALLED_LIB_DIR=$2 ; shift 2
+        --system-include-dir)
+        SYSTEM_INCLUDE_DIR=$2 ; shift 2
         ;;
-        --man-dir)
-        MAN_DIR=$2 ; shift 2
+        --system-lib-dir)
+        SYSTEM_LIB_DIR=$2 ; shift 2
         ;;
-        --example-dir)
-        EXAMPLE_DIR=$2 ; shift 2
+        --etc-hadoop)
+        ETC_HADOOP=$2 ; shift 2
         ;;
         --)
         shift ; break
@@ -130,39 +152,44 @@ for var in PREFIX BUILD_DIR; do
   fi
 done
 
-HADOOP_DIR=${HADOOP_DIR:-$PREFIX/usr/lib/hadoop}
-HDFS_DIR=${HDFS_DIR:-$PREFIX/usr/lib/hadoop-hdfs}
-YARN_DIR=${YARN_DIR:-$PREFIX/usr/lib/hadoop-yarn}
-MAPREDUCE_DIR=${MAPREDUCE_DIR:-$PREFIX/usr/lib/hadoop-mapreduce}
-CLIENT_DIR=${CLIENT_DIR:-$PREFIX/usr/lib/hadoop/client}
+MAN_DIR=${MAN_DIR:-/usr/share/man}/man1
+DOC_DIR=${DOC_DIR:-/usr/share/doc/hadoop}
+BIN_DIR=${BIN_DIR:-/usr/bin}
+ETC_DEFAULT=${ETC_DEFAULT:-/etc/default}
+HADOOP_DIR=${HADOOP_DIR:-/usr/lib/hadoop}
+HDFS_DIR=${HDFS_DIR:-/usr/lib/hadoop-hdfs}
+YARN_DIR=${YARN_DIR:-/usr/lib/hadoop-yarn}
+MAPREDUCE_DIR=${MAPREDUCE_DIR:-/usr/lib/hadoop-mapreduce}
+VAR_HDFS=${VAR_HDFS:-/var/lib/hadoop-hdfs}
+VAR_YARN=${VAR_YARN:-/var/lib/hadoop-yarn}
+VAR_MAPREDUCE=${VAR_MAPREDUCE:-/var/lib/hadoop-mapreduce}
+VAR_HTTPFS=${VAR_HTTPFS:-/var/lib/hadoop-httpfs}
+VAR_KMS=${VAR_KMS:-/var/lib/hadoop-kms}
+SYSTEM_INCLUDE_DIR=${SYSTEM_INCLUDE_DIR:-/usr/include}
 SYSTEM_LIB_DIR=${SYSTEM_LIB_DIR:-/usr/lib}
-BIN_DIR=${BIN_DIR:-$PREFIX/usr/bin}
-DOC_DIR=${DOC_DIR:-$PREFIX/usr/share/doc/hadoop}
-MAN_DIR=${MAN_DIR:-$PREFIX/usr/man}
-SYSTEM_INCLUDE_DIR=${SYSTEM_INCLUDE_DIR:-$PREFIX/usr/include}
-SYSTEM_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR:-$PREFIX/usr/libexec}
-EXAMPLE_DIR=${EXAMPLE_DIR:-$DOC_DIR/examples}
-HADOOP_ETC_DIR=${HADOOP_ETC_DIR:-$PREFIX/etc/hadoop}
-BASH_COMPLETION_DIR=${BASH_COMPLETION_DIR:-$PREFIX/etc/bash_completion.d}
-
-INSTALLED_HADOOP_DIR=${INSTALLED_HADOOP_DIR:-/usr/lib/hadoop}
-HADOOP_NATIVE_LIB_DIR=${HADOOP_DIR}/lib/native
+
+BASH_COMPLETION_DIR=${BASH_COMPLETION_DIR:-/etc/bash_completion.d}
+HADOOP_NATIVE_LIB_DIR=$HADOOP_DIR/lib/native
+
+ETC_HADOOP=${ETC_HADOOP:-/etc/hadoop}
+# No prefix
+NP_ETC_HADOOP=/etc/hadoop
 
 ##Needed for some distros to find ldconfig
 export PATH="/sbin/:$PATH"
 
 # Make bin wrappers
-mkdir -p $BIN_DIR
+mkdir -p $PREFIX/$BIN_DIR
 
-for component in $HADOOP_DIR/bin/hadoop $HDFS_DIR/bin/hdfs $YARN_DIR/bin/yarn $MAPREDUCE_DIR/bin/mapred ; do
-  wrapper=$BIN_DIR/${component#*/bin/}
+for component in $PREFIX/$HADOOP_DIR/bin/hadoop $PREFIX/$HDFS_DIR/bin/hdfs $PREFIX/$YARN_DIR/bin/yarn $PREFIX/$MAPREDUCE_DIR/bin/mapred ; do
+  wrapper=$PREFIX/$BIN_DIR/${component#*/bin/}
   cat > $wrapper <<EOF
 #!/bin/bash
 
 # Autodetect JAVA_HOME if not defined
 . /usr/lib/bigtop-utils/bigtop-detect-javahome
 
-export HADOOP_LIBEXEC_DIR=/${SYSTEM_LIBEXEC_DIR#${PREFIX}}
+export HADOOP_LIBEXEC_DIR=/$HADOOP_DIR/libexec
 
 exec ${component#${PREFIX}} "\$@"
 EOF
@@ -170,92 +197,92 @@ EOF
 done
 
 #libexec
-install -d -m 0755 ${SYSTEM_LIBEXEC_DIR}
-cp -r ${BUILD_DIR}/libexec/* ${SYSTEM_LIBEXEC_DIR}/
-cp ${DISTRO_DIR}/hadoop-layout.sh ${SYSTEM_LIBEXEC_DIR}/
-install -m 0755 ${DISTRO_DIR}/init-hdfs.sh ${SYSTEM_LIBEXEC_DIR}/
-install -m 0755 ${DISTRO_DIR}/init-hcfs.json ${SYSTEM_LIBEXEC_DIR}/
-install -m 0755 ${DISTRO_DIR}/init-hcfs.groovy ${SYSTEM_LIBEXEC_DIR}/
-rm -rf ${SYSTEM_LIBEXEC_DIR}/*.cmd
+install -d -m 0755 $PREFIX/$HADOOP_DIR/libexec
+cp -r ${BUILD_DIR}/libexec/* $PREFIX/$HADOOP_DIR/libexec/
+cp ${DISTRO_DIR}/hadoop-layout.sh $PREFIX/$HADOOP_DIR/libexec/
+install -m 0755 ${DISTRO_DIR}/init-hdfs.sh $PREFIX/$HADOOP_DIR/libexec/
+install -m 0755 ${DISTRO_DIR}/init-hcfs.json $PREFIX/$HADOOP_DIR/libexec/
+install -m 0755 ${DISTRO_DIR}/init-hcfs.groovy $PREFIX/$HADOOP_DIR/libexec/
+rm -rf $PREFIX/$HADOOP_DIR/libexec/*.cmd
 
 # hadoop jar
-install -d -m 0755 ${HADOOP_DIR}
-cp ${BUILD_DIR}/share/hadoop/common/*.jar ${HADOOP_DIR}/
-cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-auth*.jar ${HADOOP_DIR}/
-cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-annotations*.jar ${HADOOP_DIR}/
-install -d -m 0755 ${HADOOP_DIR}/tools
-install -d -m 0755 ${MAPREDUCE_DIR}
-cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-mapreduce*.jar ${MAPREDUCE_DIR}
-install -d -m 0755 ${HDFS_DIR}
-cp ${BUILD_DIR}/share/hadoop/hdfs/*.jar ${HDFS_DIR}/
-install -d -m 0755 ${YARN_DIR}
-cp ${BUILD_DIR}/share/hadoop/yarn/hadoop-yarn*.jar ${YARN_DIR}/
-install -d -m 0755 ${YARN_DIR}/timelineservice
-cp ${BUILD_DIR}/share/hadoop/yarn/timelineservice/hadoop-yarn*.jar ${YARN_DIR}/timelineservice
-chmod 644 ${HADOOP_DIR}/*.jar ${MAPREDUCE_DIR}/*.jar ${HDFS_DIR}/*.jar ${YARN_DIR}/*.jar
+install -d -m 0755 $PREFIX/$HADOOP_DIR
+cp ${BUILD_DIR}/share/hadoop/common/*.jar $PREFIX/$HADOOP_DIR/
+cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-auth*.jar $PREFIX/$HADOOP_DIR/
+cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-annotations*.jar $PREFIX/$HADOOP_DIR/
+install -d -m 0755 $PREFIX/$HADOOP_DIR/tools
+install -d -m 0755 $PREFIX/$MAPREDUCE_DIR
+cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-mapreduce*.jar $PREFIX/$MAPREDUCE_DIR
+install -d -m 0755 $PREFIX/$HDFS_DIR
+cp ${BUILD_DIR}/share/hadoop/hdfs/*.jar $PREFIX/$HDFS_DIR/
+install -d -m 0755 $PREFIX/$YARN_DIR
+cp ${BUILD_DIR}/share/hadoop/yarn/hadoop-yarn*.jar $PREFIX/$YARN_DIR/
+install -d -m 0755 $PREFIX/$YARN_DIR/timelineservice
+cp ${BUILD_DIR}/share/hadoop/yarn/timelineservice/hadoop-yarn*.jar $PREFIX/$YARN_DIR/timelineservice
+chmod 644 $PREFIX/$HADOOP_DIR/*.jar $PREFIX/$MAPREDUCE_DIR/*.jar $PREFIX/$HDFS_DIR/*.jar $PREFIX/$YARN_DIR/*.jar
 
 # lib jars
-install -d -m 0755 ${HADOOP_DIR}/lib
-cp ${BUILD_DIR}/share/hadoop/common/lib/*.jar ${HADOOP_DIR}/lib
-install -d -m 0755 ${HADOOP_DIR}/tools/lib
-cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar ${HADOOP_DIR}/tools/lib
-install -d -m 0755 ${HDFS_DIR}/lib 
-cp ${BUILD_DIR}/share/hadoop/hdfs/lib/*.jar ${HDFS_DIR}/lib
-install -d -m 0755 ${YARN_DIR}/lib
-cp ${BUILD_DIR}/share/hadoop/yarn/lib/*.jar ${YARN_DIR}/lib
-install -d -m 0755 ${YARN_DIR}/timelineservice/lib
-cp ${BUILD_DIR}/share/hadoop/yarn/timelineservice/lib/*.jar ${YARN_DIR}/timelineservice/lib
-chmod 644 ${HADOOP_DIR}/lib/*.jar ${HDFS_DIR}/lib/*.jar ${YARN_DIR}/lib/*.jar ${YARN_DIR}/timelineservice/lib/*.jar
+install -d -m 0755 $PREFIX/$HADOOP_DIR/lib
+cp ${BUILD_DIR}/share/hadoop/common/lib/*.jar $PREFIX/$HADOOP_DIR/lib
+install -d -m 0755 $PREFIX/$HADOOP_DIR/tools/lib
+cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar $PREFIX/$HADOOP_DIR/tools/lib
+install -d -m 0755 $PREFIX/$HDFS_DIR/lib 
+cp ${BUILD_DIR}/share/hadoop/hdfs/lib/*.jar $PREFIX/$HDFS_DIR/lib
+install -d -m 0755 $PREFIX/$YARN_DIR/lib
+cp ${BUILD_DIR}/share/hadoop/yarn/lib/*.jar $PREFIX/$YARN_DIR/lib
+install -d -m 0755 $PREFIX/$YARN_DIR/timelineservice/lib
+cp ${BUILD_DIR}/share/hadoop/yarn/timelineservice/lib/*.jar $PREFIX/$YARN_DIR/timelineservice/lib
+chmod 644 $PREFIX/$HADOOP_DIR/lib/*.jar $PREFIX/$HDFS_DIR/lib/*.jar $PREFIX/$YARN_DIR/lib/*.jar $PREFIX/$YARN_DIR/timelineservice/lib/*.jar
 
 # Install webapps
-cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps ${HDFS_DIR}/
-cp -ra ${BUILD_DIR}/share/hadoop/yarn/webapps ${YARN_DIR}/
+cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps $PREFIX/$HDFS_DIR/
+cp -ra ${BUILD_DIR}/share/hadoop/yarn/webapps $PREFIX/$YARN_DIR/
 
 # bin
-install -d -m 0755 ${HADOOP_DIR}/bin
-cp -a ${BUILD_DIR}/bin/{hadoop,fuse_dfs} ${HADOOP_DIR}/bin
-install -d -m 0755 ${HDFS_DIR}/bin
-cp -a ${BUILD_DIR}/bin/hdfs ${HDFS_DIR}/bin
-install -d -m 0755 ${YARN_DIR}/bin
-cp -a ${BUILD_DIR}/bin/{yarn,container-executor} ${YARN_DIR}/bin
-install -d -m 0755 ${MAPREDUCE_DIR}/bin
-cp -a ${BUILD_DIR}/bin/mapred ${MAPREDUCE_DIR}/bin
+install -d -m 0755 $PREFIX/$HADOOP_DIR/bin
+cp -a ${BUILD_DIR}/bin/{hadoop,fuse_dfs} $PREFIX/$HADOOP_DIR/bin
+install -d -m 0755 $PREFIX/$HDFS_DIR/bin
+cp -a ${BUILD_DIR}/bin/hdfs $PREFIX/$HDFS_DIR/bin
+install -d -m 0755 $PREFIX/$YARN_DIR/bin
+cp -a ${BUILD_DIR}/bin/{yarn,container-executor} $PREFIX/$YARN_DIR/bin
+install -d -m 0755 $PREFIX/$MAPREDUCE_DIR/bin
+cp -a ${BUILD_DIR}/bin/mapred $PREFIX/$MAPREDUCE_DIR/bin
 # FIXME: MAPREDUCE-3980
-cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR}/bin
+cp -a ${BUILD_DIR}/bin/mapred $PREFIX/$YARN_DIR/bin
 
 # sbin
-install -d -m 0755 ${HADOOP_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,workers,httpfs,kms}.sh ${HADOOP_DIR}/sbin
-install -d -m 0755 ${HDFS_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh ${HDFS_DIR}/sbin
-install -d -m 0755 ${YARN_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/{yarn-daemon,yarn-daemons}.sh ${YARN_DIR}/sbin
-install -d -m 0755 ${MAPREDUCE_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/mr-jobhistory-daemon.sh ${MAPREDUCE_DIR}/sbin
+install -d -m 0755 $PREFIX/$HADOOP_DIR/sbin
+cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,workers,httpfs,kms}.sh $PREFIX/$HADOOP_DIR/sbin
+install -d -m 0755 $PREFIX/$HDFS_DIR/sbin
+cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh $PREFIX/$HDFS_DIR/sbin
+install -d -m 0755 $PREFIX/$YARN_DIR/sbin
+cp -a ${BUILD_DIR}/sbin/{yarn-daemon,yarn-daemons}.sh $PREFIX/$YARN_DIR/sbin
+install -d -m 0755 $PREFIX/$MAPREDUCE_DIR/sbin
+cp -a ${BUILD_DIR}/sbin/mr-jobhistory-daemon.sh $PREFIX/$MAPREDUCE_DIR/sbin
 
 # native libs
-install -d -m 0755 ${SYSTEM_LIB_DIR}
-install -d -m 0755 ${HADOOP_NATIVE_LIB_DIR}
+install -d -m 0755 $PREFIX/$SYSTEM_LIB_DIR
+install -d -m 0755 $PREFIX/$HADOOP_NATIVE_LIB_DIR
 
 for library in libhdfs.so.0.0.0 libhdfspp.so.0.1.0 ; do
-  cp ${BUILD_DIR}/lib/native/${library} ${SYSTEM_LIB_DIR}/
-  ldconfig -vlN ${SYSTEM_LIB_DIR}/${library}
-  ln -s ${library} ${SYSTEM_LIB_DIR}/${library/.so.*/}.so
+  cp ${BUILD_DIR}/lib/native/${library} $PREFIX/$SYSTEM_LIB_DIR/
+  ldconfig -vlN $PREFIX/$SYSTEM_LIB_DIR/${library}
+  ln -s ${library} $PREFIX/$SYSTEM_LIB_DIR/${library/.so.*/}.so
 done
 
-install -d -m 0755 ${SYSTEM_INCLUDE_DIR}
-cp ${BUILD_DIR}/include/hdfs.h ${SYSTEM_INCLUDE_DIR}/
-cp -r ${BUILD_DIR}/include/hdfspp ${SYSTEM_INCLUDE_DIR}/
+install -d -m 0755 $PREFIX/$SYSTEM_INCLUDE_DIR
+cp ${BUILD_DIR}/include/hdfs.h $PREFIX/$SYSTEM_INCLUDE_DIR/
+cp -r ${BUILD_DIR}/include/hdfspp $PREFIX/$SYSTEM_INCLUDE_DIR/
 
-cp ${BUILD_DIR}/lib/native/*.a ${HADOOP_NATIVE_LIB_DIR}/
+cp ${BUILD_DIR}/lib/native/*.a $PREFIX/$HADOOP_NATIVE_LIB_DIR/
 for library in `cd ${BUILD_DIR}/lib/native ; ls libsnappy.so.1.* 2>/dev/null` libhadoop.so.1.0.0 libnativetask.so.1.0.0; do
-  cp ${BUILD_DIR}/lib/native/${library} ${HADOOP_NATIVE_LIB_DIR}/
-  ldconfig -vlN ${HADOOP_NATIVE_LIB_DIR}/${library}
-  ln -s ${library} ${HADOOP_NATIVE_LIB_DIR}/${library/.so.*/}.so
+  cp ${BUILD_DIR}/lib/native/${library} $PREFIX/$HADOOP_NATIVE_LIB_DIR/
+  ldconfig -vlN $PREFIX/$HADOOP_NATIVE_LIB_DIR/${library}
+  ln -s ${library} $PREFIX/$HADOOP_NATIVE_LIB_DIR/${library/.so.*/}.so
 done
 
 # Install fuse wrapper
-fuse_wrapper=${BIN_DIR}/hadoop-fuse-dfs
+fuse_wrapper=$PREFIX/$BIN_DIR/hadoop-fuse-dfs
 cat > $fuse_wrapper << EOF
 #!/bin/bash
 
@@ -264,12 +291,12 @@ cat > $fuse_wrapper << EOF
 # Autodetect JAVA_HOME if not defined
 . /usr/lib/bigtop-utils/bigtop-detect-javahome
 
-export HADOOP_HOME=\${HADOOP_HOME:-${HADOOP_DIR#${PREFIX}}}
+export HADOOP_HOME=\${HADOOP_HOME:-$HADOOP_DIR}
 
-BIGTOP_DEFAULTS_DIR=\${BIGTOP_DEFAULTS_DIR-/etc/default}
+BIGTOP_DEFAULTS_DIR=\${BIGTOP_DEFAULTS_DIR-$ETC_DEFAULT}
 [ -n "\${BIGTOP_DEFAULTS_DIR}" -a -r \${BIGTOP_DEFAULTS_DIR}/hadoop-fuse ] && . \${BIGTOP_DEFAULTS_DIR}/hadoop-fuse
 
-export HADOOP_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR#${PREFIX}}
+export HADOOP_LIBEXEC_DIR=$HADOOP_DIR/libexec
 
 if [ "\${LD_LIBRARY_PATH}" = "" ]; then
   export JAVA_NATIVE_LIBS="libjvm.so"
@@ -289,67 +316,71 @@ EOF
 chmod 755 $fuse_wrapper
 
 # Bash tab completion
-install -d -m 0755 $BASH_COMPLETION_DIR
+install -d -m 0755 $PREFIX/$BASH_COMPLETION_DIR
 install -m 0644 \
   hadoop-common-project/hadoop-common/src/contrib/bash-tab-completion/hadoop.sh \
-  $BASH_COMPLETION_DIR/hadoop
+  $PREFIX/$BASH_COMPLETION_DIR/hadoop
 
 # conf
-install -d -m 0755 $HADOOP_ETC_DIR/conf.empty
-cp ${DISTRO_DIR}/conf.empty/mapred-site.xml $HADOOP_ETC_DIR/conf.empty
+install -d -m 0755 $PREFIX/$NP_ETC_HADOOP
+install -d -m 0755 $PREFIX/$ETC_HADOOP/conf.empty
+cp ${DISTRO_DIR}/conf.empty/mapred-site.xml $PREFIX/$ETC_HADOOP/conf.empty
 # disable everything that's defined in hadoop-env.sh
 # so that it can still be used as example, but doesn't affect anything
 # by default
 sed -i -e '/^[^#]/s,^,#,' ${BUILD_DIR}/etc/hadoop/hadoop-env.sh
-cp -r ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/conf.empty
-rm -rf $HADOOP_ETC_DIR/conf.empty/*.cmd
+cp -r ${BUILD_DIR}/etc/hadoop/* $PREFIX/$ETC_HADOOP/conf.empty
+rm -rf $PREFIX/$ETC_HADOOP/conf.empty/*.cmd
 
 # docs
-install -d -m 0755 ${DOC_DIR}
-cp -r ${BUILD_DIR}/share/doc/* ${DOC_DIR}/
+install -d -m 0755 $PREFIX/$DOC_DIR
+cp -r ${BUILD_DIR}/share/doc/* $PREFIX/$DOC_DIR/
 
 # man pages
-mkdir -p $MAN_DIR/man1
+mkdir -p $PREFIX/$MAN_DIR
 for manpage in hadoop hdfs yarn mapred; do
-	gzip -c < $DISTRO_DIR/$manpage.1 > $MAN_DIR/man1/$manpage.1.gz
-	chmod 644 $MAN_DIR/man1/$manpage.1.gz
+	gzip -c < $DISTRO_DIR/$manpage.1 > $PREFIX/$MAN_DIR/$manpage.1.gz
+	chmod 644 $PREFIX/$MAN_DIR/$manpage.1.gz
 done
 
 # HTTPFS
-install -d -m 0755 ${PREFIX}/var/lib/hadoop-httpfs
+install -d -m 0755 ${PREFIX}/${VAR_HTTPFS}
 
 # KMS
-install -d -m 0755 ${PREFIX}/var/lib/hadoop-kms
+install -d -m 0755 ${PREFIX}/${VAR_KMS}
 
 
 for conf in conf.pseudo ; do
-  install -d -m 0755 $HADOOP_ETC_DIR/$conf
+  install -d -m 0755 $PREFIX/$ETC_HADOOP/$conf
   # Install the upstream config files
-  cp -r ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/$conf
+  cp -r ${BUILD_DIR}/etc/hadoop/* $PREFIX/$ETC_HADOOP/$conf
   # Remove the ones that shouldn't be installed
-  rm -rf $HADOOP_ETC_DIR/$conf/*.cmd
+  rm -rf $PREFIX/$ETC_HADOOP/$conf/*.cmd
   # Overlay the -site files
-  (cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $HADOOP_ETC_DIR/$conf && tar -xf -)
-  find $HADOOP_ETC_DIR/$conf/ -type f -print -exec chmod 0644 {} \;
-  find $HADOOP_ETC_DIR/$conf/ -type d -print -exec chmod 0755 {} \;
+  (cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $PREFIX/$ETC_HADOOP/$conf && tar -xf -)
+  find $PREFIX/$ETC_HADOOP/$conf/ -type f -print -exec chmod 0644 {} \;
+  find $PREFIX/$ETC_HADOOP/$conf/ -type d -print -exec chmod 0755 {} \;
   # When building straight out of svn we have to account for pesky .svn subdirs 
-  rm -rf `find $HADOOP_ETC_DIR/$conf -name .svn -type d` 
+  rm -rf `find $PREFIX/$ETC_HADOOP/$conf -name .svn -type d` 
 done
-cp ${BUILD_DIR}/etc/hadoop/log4j.properties $HADOOP_ETC_DIR/conf.pseudo
+cp ${BUILD_DIR}/etc/hadoop/log4j.properties $PREFIX/$ETC_HADOOP/conf.pseudo
 
 # FIXME: Provide a convenience link for configuration (HADOOP-7939)
-install -d -m 0755 ${HADOOP_DIR}/etc
-ln -s ${HADOOP_ETC_DIR##${PREFIX}}/conf ${HADOOP_DIR}/etc/hadoop
-install -d -m 0755 ${YARN_DIR}/etc
-ln -s ${HADOOP_ETC_DIR##${PREFIX}}/conf ${YARN_DIR}/etc/hadoop
+install -d -m 0755 $PREFIX/$HADOOP_DIR/etc
+ln -s $NP_ETC_HADOOP/conf $PREFIX/$HADOOP_DIR/etc/hadoop
+install -d -m 0755 $PREFIX/$YARN_DIR/etc
+ln -s $NP_ETC_HADOOP/conf $PREFIX/$YARN_DIR/etc/hadoop
 
 # Create log, var and lib
-install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-hdfs
-install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-yarn
-install -d -m 0755 $PREFIX/var/{log,run,lib}/hadoop-mapreduce
+install -d -m 0755 ${PREFIX}/${VAR_HDFS}
+install -d -m 0755 ${PREFIX}/${VAR_YARN}
+install -d -m 0755 ${PREFIX}/${VAR_MAPREDUCE}
+install -d -m 0755 $PREFIX/var/{log,run}/hadoop-hdfs
+install -d -m 0755 $PREFIX/var/{log,run}/hadoop-yarn
+install -d -m 0755 $PREFIX/var/{log,run}/hadoop-mapreduce
 
 # Remove all source and create version-less symlinks to offer integration point with other projects
-for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ; do
+for DIR in $PREFIX/$HADOOP_DIR $PREFIX/$HDFS_DIR $PREFIX/$YARN_DIR $PREFIX/$MAPREDUCE_DIR ; do
   (cd $DIR &&
    rm -fv *-sources.jar
    for j in hadoop-*.jar; do
@@ -361,12 +392,12 @@ for DIR in ${HADOOP_DIR} ${HDFS_DIR} ${YARN_DIR} ${MAPREDUCE_DIR} ; do
 done
 
 # Now create a client installation area full of symlinks
-install -d -m 0755 ${CLIENT_DIR}
+install -d -m 0755 $PREFIX/$HADOOP_DIR/client
 for file in `cat ${BUILD_DIR}/hadoop-client.list` ; do
-  for dir in ${HADOOP_DIR}/{lib,} ${HDFS_DIR}/{lib,} ${YARN_DIR}/{lib,} ${MAPREDUCE_DIR}/{lib,} ; do
+  for dir in $PREFIX/$HADOOP_DIR/{lib,} $PREFIX/$HDFS_DIR/{lib,} $PREFIX/$YARN_DIR/{lib,} $PREFIX/$MAPREDUCE_DIR/{lib,} ; do
     [ -e $dir/$file ] && \
-    ln -fs ${dir#$PREFIX}/$file ${CLIENT_DIR}/${file} && \
-    ln -fs ${dir#$PREFIX}/$file ${CLIENT_DIR}/${file/-[[:digit:]]*/.jar} && \
+    ln -fs ${dir#$PREFIX}/$file $PREFIX/$HADOOP_DIR/client/${file} && \
+    ln -fs ${dir#$PREFIX}/$file $PREFIX/$HADOOP_DIR/client/${file/-[[:digit:]]*/.jar} && \
     continue 2
   done
   exit 1
diff --git a/bigtop-packages/src/deb/hadoop/rules b/bigtop-packages/src/deb/hadoop/rules
index c3b689f9..bc080cdf 100755
--- a/bigtop-packages/src/deb/hadoop/rules
+++ b/bigtop-packages/src/deb/hadoop/rules
@@ -57,13 +57,10 @@ override_dh_auto_install:
 	  --prefix=debian/tmp/ \
 	  --distro-dir=debian \
 	  --build-dir=${CURDIR}/build \
-	  --system-lib-dir=debian/tmp/usr/lib/ \
-	  --system-libexec-dir=debian/tmp/usr/lib/hadoop/libexec/ \
-	  --system-include-dir=debian/tmp/usr/include \
-	  --doc-dir=debian/tmp/usr/share/doc/hadoop-doc \
-          --man-dir=debian/tmp/usr/share/man \
-	  --example-dir=debian/tmp/usr/share/doc/hadoop/examples \
-	  --native-build-string=${native_dir} \
+	  --system-lib-dir=/usr/lib/ \
+	  --system-include-dir=/usr/include \
+	  --doc-dir=/usr/share/doc/hadoop-doc \
+      --man-dir=/usr/share/man \
 	  --installed-lib-dir=/usr/lib/hadoop
 	# Forcing Zookeeper dependency to be on the packaged jar
 	ln -sf /usr/lib/zookeeper/zookeeper.jar debian/tmp/usr/lib/hadoop/lib/zookeeper-[[:digit:]]*.jar
diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
index ca6ec560..09cf20fb 100644
--- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
+++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
@@ -23,39 +23,42 @@
 %undefine _auto_set_build_flags
 
 %define hadoop_name hadoop
-%define etc_hadoop /etc/%{name}
-%define etc_yarn /etc/yarn
-%define config_hadoop %{etc_hadoop}/conf
-%define config_yarn %{etc_yarn}/conf
-%define lib_hadoop_dirname /usr/lib
-%define lib_hadoop %{lib_hadoop_dirname}/%{name}
-%define lib_hdfs %{lib_hadoop_dirname}/%{name}-hdfs
-%define lib_yarn %{lib_hadoop_dirname}/%{name}-yarn
-%define lib_mapreduce %{lib_hadoop_dirname}/%{name}-mapreduce
-%define log_hadoop_dirname /var/log
-%define log_hadoop %{log_hadoop_dirname}/%{name}
-%define log_yarn %{log_hadoop_dirname}/%{name}-yarn
-%define log_hdfs %{log_hadoop_dirname}/%{name}-hdfs
-%define log_httpfs %{log_hadoop_dirname}/%{name}-httpfs
-%define log_kms %{log_hadoop_dirname}/%{name}-kms
-%define log_mapreduce %{log_hadoop_dirname}/%{name}-mapreduce
-%define run_hadoop_dirname /var/run
-%define run_hadoop %{run_hadoop_dirname}/hadoop
-%define run_yarn %{run_hadoop_dirname}/%{name}-yarn
-%define run_hdfs %{run_hadoop_dirname}/%{name}-hdfs
-%define run_httpfs %{run_hadoop_dirname}/%{name}-httpfs
-%define run_kms %{run_hadoop_dirname}/%{name}-kms
-%define run_mapreduce %{run_hadoop_dirname}/%{name}-mapreduce
-%define state_hadoop_dirname /var/lib
-%define state_hadoop %{state_hadoop_dirname}/hadoop
-%define state_yarn %{state_hadoop_dirname}/%{name}-yarn
-%define state_hdfs %{state_hadoop_dirname}/%{name}-hdfs
-%define state_mapreduce %{state_hadoop_dirname}/%{name}-mapreduce
-%define state_httpfs %{state_hadoop_dirname}/%{name}-httpfs
-%define state_kms %{state_hadoop_dirname}/%{name}-kms
-%define bin_hadoop %{_bindir}
-%define man_hadoop %{_mandir}
-%define doc_hadoop %{_docdir}/%{name}-%{hadoop_version}
+
+%define etc_default %{parent_dir}/etc/default
+
+%define usr_lib_hadoop %{parent_dir}/usr/lib/%{hadoop_name}
+%define usr_lib_hdfs %{parent_dir}/usr/lib/%{hadoop_name}-hdfs
+%define usr_lib_yarn %{parent_dir}/usr/lib/%{hadoop_name}-yarn
+%define usr_lib_mapreduce %{parent_dir}/usr/lib/%{hadoop_name}-mapreduce
+%define var_lib_yarn %{parent_dir}/var/lib/%{hadoop_name}-yarn
+%define var_lib_hdfs %{parent_dir}/var/lib/%{hadoop_name}-hdfs
+%define var_lib_mapreduce %{parent_dir}/var/lib/%{hadoop_name}-mapreduce
+%define var_lib_httpfs %{parent_dir}/var/lib/%{hadoop_name}-httpfs
+%define var_lib_kms %{parent_dir}/var/lib/%{hadoop_name}-kms
+%define etc_hadoop %{parent_dir}/etc/%{hadoop_name}
+
+%define usr_lib_zookeeper %{parent_dir}/usr/lib/zookeeper
+
+%define bin_dir %{parent_dir}/%{_bindir}
+%define man_dir %{parent_dir}/%{_mandir}
+%define doc_dir %{parent_dir}/%{_docdir}
+%define include_dir %{parent_dir}/%{_includedir}
+%define lib_dir %{parent_dir}/%{_libdir}
+%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
+
+# No prefix directory
+%define np_var_log_yarn /var/log/%{hadoop_name}-yarn
+%define np_var_log_hdfs /var/log/%{hadoop_name}-hdfs
+%define np_var_log_httpfs /var/log/%{hadoop_name}-httpfs
+%define np_var_log_kms /var/log/%{hadoop_name}-kms
+%define np_var_log_mapreduce /var/log/%{hadoop_name}-mapreduce
+%define np_var_run_yarn /var/run/%{hadoop_name}-yarn
+%define np_var_run_hdfs /var/run/%{hadoop_name}-hdfs
+%define np_var_run_httpfs /var/run/%{hadoop_name}-httpfs
+%define np_var_run_kms /var/run/%{hadoop_name}-kms
+%define np_var_run_mapreduce /var/run/%{hadoop_name}-mapreduce
+%define np_etc_hadoop /etc/%{hadoop_name}
+
 %define httpfs_services httpfs
 %define kms_services kms
 %define mapreduce_services mapreduce-historyserver
@@ -65,7 +68,6 @@
 # Hadoop outputs built binaries into %{hadoop_build}
 %define hadoop_build_path build
 %define static_images_dir src/webapps/static/images
-%define libexecdir /usr/lib
 
 %ifarch i386
 %global hadoop_arch Linux-i386-32
@@ -91,7 +93,7 @@
     %{nil}
 
 %define netcat_package nc
-%define doc_hadoop %{_docdir}/%{name}-%{hadoop_version}
+%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
 %define alternatives_cmd alternatives
 %global initd_dir %{_sysconfdir}/rc.d/init.d
 %endif
@@ -111,14 +113,14 @@
     %{nil}
 
 %define netcat_package netcat-openbsd
-%define doc_hadoop %{_docdir}/%{name}
+%define doc_hadoop %{doc_dir}/%{name}
 %define alternatives_cmd update-alternatives
 %global initd_dir %{_sysconfdir}/rc.d
 %endif
 
 %if  0%{?mgaversion}
 %define netcat_package netcat-openbsd
-%define doc_hadoop %{_docdir}/%{name}-%{hadoop_version}
+%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
 %define alternatives_cmd update-alternatives
 %global initd_dir %{_sysconfdir}/rc.d/init.d
 %endif
@@ -543,43 +545,50 @@ bash %{SOURCE1}
 %install
 %__rm -rf $RPM_BUILD_ROOT
 
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{lib_hadoop}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{usr_lib_hadoop}
 
 env HADOOP_VERSION=%{hadoop_base_version} bash %{SOURCE2} \
   --distro-dir=$RPM_SOURCE_DIR \
   --build-dir=$PWD/build \
-  --system-include-dir=$RPM_BUILD_ROOT%{_includedir} \
-  --system-lib-dir=$RPM_BUILD_ROOT%{_libdir} \
-  --system-libexec-dir=$RPM_BUILD_ROOT/%{lib_hadoop}/libexec \
-  --hadoop-etc-dir=$RPM_BUILD_ROOT%{etc_hadoop} \
   --prefix=$RPM_BUILD_ROOT \
-  --doc-dir=$RPM_BUILD_ROOT%{doc_hadoop} \
-  --example-dir=$RPM_BUILD_ROOT%{doc_hadoop}/examples \
-  --native-build-string=%{hadoop_arch} \
-  --installed-lib-dir=%{lib_hadoop} \
-  --man-dir=$RPM_BUILD_ROOT%{man_hadoop} \
+  --doc-dir=%{doc_hadoop} \
+  --bin-dir=%{bin_dir} \
+  --man-dir=%{man_dir} \
+  --etc-default=%{etc_default} \
+  --hadoop-dir=%{usr_lib_hadoop} \
+  --hdfs-dir=%{usr_lib_hdfs} \
+  --yarn-dir=%{usr_lib_yarn} \
+  --mapreduce-dir=%{usr_lib_mapreduce} \
+  --var-hdfs=%{var_lib_hdfs} \
+  --var-yarn=%{var_lib_yarn} \
+  --var-mapreduce=%{var_lib_mapreduce} \
+  --var-httpfs=%{var_lib_httpfs} \
+  --var-kms=%{var_lib_kms} \
+  --system-include-dir=%{include_dir} \
+  --system-lib-dir=%{lib_dir} \
+  --etc-hadoop=%{etc_hadoop}
 
 # Forcing Zookeeper dependency to be on the packaged jar
-%__ln_s -f /usr/lib/zookeeper/zookeeper.jar $RPM_BUILD_ROOT/%{lib_hadoop}/lib/zookeeper-[[:digit:]]*.jar
+%__ln_s -f %{usr_lib_zookeeper}/zookeeper.jar $RPM_BUILD_ROOT/%{usr_lib_hadoop}/lib/zookeeper-[[:digit:]]*.jar
 # Workaround for BIGTOP-583
-%__rm -f $RPM_BUILD_ROOT/%{lib_hadoop}-*/lib/slf4j-log4j12-*.jar
+%__rm -f $RPM_BUILD_ROOT/%{usr_lib_hadoop}-*/lib/slf4j-log4j12-*.jar
 
 # Init.d scripts
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{initd_dir}/
 
 # Install top level /etc/default files
-%__install -d -m 0755 $RPM_BUILD_ROOT/etc/default
-%__cp $RPM_SOURCE_DIR/hadoop.default $RPM_BUILD_ROOT/etc/default/hadoop
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default}
+%__cp $RPM_SOURCE_DIR/hadoop.default $RPM_BUILD_ROOT/%{etc_default}/hadoop
 # FIXME: BIGTOP-463
-echo 'export JSVC_HOME=%{libexecdir}/bigtop-utils' >> $RPM_BUILD_ROOT/etc/default/hadoop
-%__cp $RPM_SOURCE_DIR/%{name}-fuse.default $RPM_BUILD_ROOT/etc/default/%{name}-fuse
+echo 'export JSVC_HOME=/usr/lib/bigtop-utils' >> $RPM_BUILD_ROOT/%{etc_default}/hadoop
+%__cp $RPM_SOURCE_DIR/%{name}-fuse.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-fuse
 
 # Generate the init.d scripts
 for service in %{hadoop_services}
 do
        bash %{SOURCE11} $RPM_SOURCE_DIR/%{name}-${service}.svc rpm $RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
-       cp $RPM_SOURCE_DIR/${service/-*/}.default $RPM_BUILD_ROOT/etc/default/%{name}-${service}
-       chmod 644 $RPM_BUILD_ROOT/etc/default/%{name}-${service}
+       cp $RPM_SOURCE_DIR/${service/-*/}.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-${service}
+       chmod 644 $RPM_BUILD_ROOT/%{etc_default}/%{name}-${service}
 done
 
 # Install security limits
@@ -589,53 +598,53 @@ done
 %__install -m 0644 %{SOURCE10} $RPM_BUILD_ROOT/etc/security/limits.d/mapreduce.conf
 
 # Install fuse default file
-%__install -d -m 0755 $RPM_BUILD_ROOT/etc/default
-%__cp %{SOURCE4} $RPM_BUILD_ROOT/etc/default/hadoop-fuse
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default}
+%__cp %{SOURCE4} $RPM_BUILD_ROOT/%{etc_default}/hadoop-fuse
 
 # /var/lib/*/cache
-%__install -d -m 1777 $RPM_BUILD_ROOT/%{state_yarn}/cache
-%__install -d -m 1777 $RPM_BUILD_ROOT/%{state_hdfs}/cache
-%__install -d -m 1777 $RPM_BUILD_ROOT/%{state_mapreduce}/cache
+%__install -d -m 1777 $RPM_BUILD_ROOT/%{var_lib_yarn}/cache
+%__install -d -m 1777 $RPM_BUILD_ROOT/%{var_lib_hdfs}/cache
+%__install -d -m 1777 $RPM_BUILD_ROOT/%{var_lib_mapreduce}/cache
 # /var/log/*
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{log_yarn}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{log_hdfs}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{log_mapreduce}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{log_httpfs}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{log_kms}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_yarn}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_hdfs}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_mapreduce}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_httpfs}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_kms}
 # /var/run/*
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{run_yarn}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{run_hdfs}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{run_mapreduce}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{run_httpfs}
-%__install -d -m 0755 $RPM_BUILD_ROOT/%{run_kms}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_yarn}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_hdfs}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_mapreduce}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_httpfs}
+%__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_kms}
 
-%__install -d -m 1777 $RPM_BUILD_ROOT/%{lib_hadoop}/logs
+%__install -d -m 1777 $RPM_BUILD_ROOT/%{usr_lib_hadoop}/logs
 
 %pre
 getent group hadoop >/dev/null || groupadd -r hadoop
 
 %pre hdfs
-getent group hdfs >/dev/null   || groupadd -r hdfs
-getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HDFS" --shell /bin/bash -M -r -g hdfs -G hadoop --home %{state_hdfs} hdfs
+getent group hdfs >/dev/null  || groupadd -r hdfs
+getent passwd hdfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HDFS" --shell /bin/bash -M -r -g hdfs -G hadoop --home %{var_lib_hdfs} hdfs
 
 %pre httpfs
-getent group httpfs >/dev/null   || groupadd -r httpfs
-getent passwd httpfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HTTPFS" --shell /bin/bash -M -r -g httpfs -G httpfs --home %{state_httpfs} httpfs
+getent group httpfs >/dev/null  || groupadd -r httpfs
+getent passwd httpfs >/dev/null || /usr/sbin/useradd --comment "Hadoop HTTPFS" --shell /bin/bash -M -r -g httpfs -G httpfs --home %{var_lib_httpfs} httpfs
 
 %pre kms
-getent group kms >/dev/null   || groupadd -r kms
-getent passwd kms >/dev/null || /usr/sbin/useradd --comment "Hadoop KMS" --shell /bin/bash -M -r -g kms -G kms --home %{state_kms} kms
+getent group kms >/dev/null  || groupadd -r kms
+getent passwd kms >/dev/null || /usr/sbin/useradd --comment "Hadoop KMS" --shell /bin/bash -M -r -g kms -G kms --home %{var_lib_kms} kms
 
 %pre yarn
-getent group yarn >/dev/null   || groupadd -r yarn
-getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --shell /bin/bash -M -r -g yarn -G hadoop --home %{state_yarn} yarn
+getent group yarn >/dev/null  || groupadd -r yarn
+getent passwd yarn >/dev/null || /usr/sbin/useradd --comment "Hadoop Yarn" --shell /bin/bash -M -r -g yarn -G hadoop --home %{var_lib_yarn} yarn
 
 %pre mapreduce
-getent group mapred >/dev/null   || groupadd -r mapred
-getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r -g mapred -G hadoop --home %{state_mapreduce} mapred
+getent group mapred >/dev/null  || groupadd -r mapred
+getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r -g mapred -G hadoop --home %{var_lib_mapreduce} mapred
 
 %post
-%{alternatives_cmd} --install %{config_hadoop} %{name}-conf %{etc_hadoop}/conf.empty 10
+%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{name}-conf %{etc_hadoop}/conf.empty 10
 
 %post httpfs
 chkconfig --add %{name}-httpfs
@@ -677,29 +686,29 @@ fi
 %config(noreplace) %{etc_hadoop}/conf.empty/capacity-scheduler.xml
 %config(noreplace) %{etc_hadoop}/conf.empty/container-executor.cfg
 %config(noreplace) /etc/security/limits.d/yarn.conf
-%{lib_hadoop}/libexec/yarn-config.sh
-%{lib_yarn}
-%attr(4754,root,yarn) %{lib_yarn}/bin/container-executor
-%{bin_hadoop}/yarn
-%attr(0775,yarn,hadoop) %{run_yarn}
-%attr(0775,yarn,hadoop) %{log_yarn}
-%attr(0755,yarn,hadoop) %{state_yarn}
-%attr(1777,yarn,hadoop) %{state_yarn}/cache
+%{usr_lib_hadoop}/libexec/yarn-config.sh
+%{usr_lib_yarn}
+%attr(4754,root,yarn) %{usr_lib_yarn}/bin/container-executor
+%{bin_dir}/yarn
+%attr(0775,yarn,hadoop) %{np_var_run_yarn}
+%attr(0775,yarn,hadoop) %{np_var_log_yarn}
+%attr(0755,yarn,hadoop) %{var_lib_yarn}
+%attr(1777,yarn,hadoop) %{var_lib_yarn}/cache
 
 %files hdfs
 %defattr(-,root,root)
 %config(noreplace) %{etc_hadoop}/conf.empty/hdfs-site.xml
 %config(noreplace) /etc/security/limits.d/hdfs.conf
-%{lib_hdfs}
-%{lib_hadoop}/libexec/hdfs-config.sh
-%{bin_hadoop}/hdfs
-%attr(0775,hdfs,hadoop) %{run_hdfs}
-%attr(0775,hdfs,hadoop) %{log_hdfs}
-%attr(0755,hdfs,hadoop) %{state_hdfs}
-%attr(1777,hdfs,hadoop) %{state_hdfs}/cache
-%{lib_hadoop}/libexec/init-hdfs.sh
-%{lib_hadoop}/libexec/init-hcfs.json
-%{lib_hadoop}/libexec/init-hcfs.groovy
+%{usr_lib_hdfs}
+%{usr_lib_hadoop}/libexec/hdfs-config.sh
+%{bin_dir}/hdfs
+%attr(0775,hdfs,hadoop) %{np_var_run_hdfs}
+%attr(0775,hdfs,hadoop) %{np_var_log_hdfs}
+%attr(0755,hdfs,hadoop) %{var_lib_hdfs}
+%attr(1777,hdfs,hadoop) %{var_lib_hdfs}/cache
+%{usr_lib_hadoop}/libexec/init-hdfs.sh
+%{usr_lib_hadoop}/libexec/init-hcfs.json
+%{usr_lib_hadoop}/libexec/init-hcfs.groovy
 
 %files mapreduce
 %defattr(-,root,root)
@@ -707,13 +716,13 @@ fi
 %config(noreplace) %{etc_hadoop}/conf.empty/mapred-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/mapred-queues.xml.template
 %config(noreplace) /etc/security/limits.d/mapreduce.conf
-%{lib_mapreduce}
-%{lib_hadoop}/libexec/mapred-config.sh
-%{bin_hadoop}/mapred
-%attr(0775,mapred,hadoop) %{run_mapreduce}
-%attr(0775,mapred,hadoop) %{log_mapreduce}
-%attr(0775,mapred,hadoop) %{state_mapreduce}
-%attr(1777,mapred,hadoop) %{state_mapreduce}/cache
+%{usr_lib_mapreduce}
+%{usr_lib_hadoop}/libexec/mapred-config.sh
+%{bin_dir}/mapred
+%attr(0775,mapred,hadoop) %{np_var_run_mapreduce}
+%attr(0775,mapred,hadoop) %{np_var_log_mapreduce}
+%attr(0775,mapred,hadoop) %{var_lib_mapreduce}
+%attr(1777,mapred,hadoop) %{var_lib_mapreduce}/cache
 
 
 %files
@@ -727,29 +736,30 @@ fi
 %config(noreplace) %{etc_hadoop}/conf.empty/configuration.xsl
 %config(noreplace) %{etc_hadoop}/conf.empty/hadoop-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/hadoop-policy.xml
-%config(noreplace) /etc/default/hadoop
+%config(noreplace) %{etc_default}/hadoop
+%dir %{np_etc_hadoop}
 /etc/bash_completion.d/hadoop
-%{lib_hadoop}/*.jar
-%{lib_hadoop}/lib
-%{lib_hadoop}/sbin
-%{lib_hadoop}/bin
-%{lib_hadoop}/etc
-%{lib_hadoop}/logs
-%{lib_hadoop}/tools
-%{lib_hadoop}/libexec/hadoop-config.sh
-%{lib_hadoop}/libexec/hadoop-layout.sh
-%{lib_hadoop}/libexec/hadoop-functions.sh
-%{lib_hadoop}/libexec/shellprofile.d
-%{lib_hadoop}/libexec/tools
-%{bin_hadoop}/hadoop
-%{man_hadoop}/man1/hadoop.1.*
-%{man_hadoop}/man1/yarn.1.*
-%{man_hadoop}/man1/hdfs.1.*
-%{man_hadoop}/man1/mapred.1.*
-%attr(1777,hdfs,hadoop) %{lib_hadoop}/logs
+%{usr_lib_hadoop}/*.jar
+%{usr_lib_hadoop}/lib
+%{usr_lib_hadoop}/sbin
+%{usr_lib_hadoop}/bin
+%{usr_lib_hadoop}/etc
+%{usr_lib_hadoop}/logs
+%{usr_lib_hadoop}/tools
+%{usr_lib_hadoop}/libexec/hadoop-config.sh
+%{usr_lib_hadoop}/libexec/hadoop-layout.sh
+%{usr_lib_hadoop}/libexec/hadoop-functions.sh
+%{usr_lib_hadoop}/libexec/shellprofile.d
+%{usr_lib_hadoop}/libexec/tools
+%{bin_dir}/hadoop
+%{man_dir}/man1/hadoop.1.*
+%{man_dir}/man1/yarn.1.*
+%{man_dir}/man1/hdfs.1.*
+%{man_dir}/man1/mapred.1.*
+%attr(1777,hdfs,hadoop) %{usr_lib_hadoop}/logs
 
 # Shouldn't the following be moved to hadoop-hdfs?
-%exclude %{lib_hadoop}/bin/fuse_dfs
+%exclude %{usr_lib_hadoop}/bin/fuse_dfs
 
 %files doc
 %defattr(-,root,root)
@@ -758,14 +768,14 @@ fi
 %files httpfs
 %defattr(-,root,root)
 
-%config(noreplace) /etc/default/%{name}-httpfs
+%config(noreplace) %{etc_default}/%{name}-httpfs
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-log4j.properties
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-site.xml
 %{initd_dir}/%{name}-httpfs
-%attr(0775,httpfs,httpfs) %{run_httpfs}
-%attr(0775,httpfs,httpfs) %{log_httpfs}
-%attr(0775,httpfs,httpfs) %{state_httpfs}
+%attr(0775,httpfs,httpfs) %{np_var_run_httpfs}
+%attr(0775,httpfs,httpfs) %{np_var_log_httpfs}
+%attr(0775,httpfs,httpfs) %{var_lib_httpfs}
 
 %files kms
 %defattr(-,root,root)
@@ -773,18 +783,18 @@ fi
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-log4j.properties
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-site.xml
-%config(noreplace) /etc/default/%{name}-kms
+%config(noreplace) %{etc_default}/%{name}-kms
 %{initd_dir}/%{name}-kms
-%attr(0775,kms,kms) %{run_kms}
-%attr(0775,kms,kms) %{log_kms}
-%attr(0775,kms,kms) %{state_kms}
+%attr(0775,kms,kms) %{np_var_run_kms}
+%attr(0775,kms,kms) %{np_var_log_kms}
+%attr(0775,kms,kms) %{var_lib_kms}
 
 # Service file management RPMs
 %define service_macro() \
 %files %1 \
 %defattr(-,root,root) \
 %{initd_dir}/%{name}-%1 \
-%config(noreplace) /etc/default/%{name}-%1 \
+%config(noreplace) %{etc_default}/%{name}-%1 \
 %post %1 \
 chkconfig --add %{name}-%1 \
 \
@@ -813,7 +823,7 @@ fi
 
 # Pseudo-distributed Hadoop installation
 %post conf-pseudo
-%{alternatives_cmd} --install %{config_hadoop} %{name}-conf %{etc_hadoop}/conf.pseudo 30
+%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{name}-conf %{etc_hadoop}/conf.pseudo 30
 
 %preun conf-pseudo
 if [ "$1" = 0 ]; then
@@ -826,25 +836,25 @@ fi
 
 %files client
 %defattr(-,root,root)
-%{lib_hadoop}/client
+%{usr_lib_hadoop}/client
 
 %files libhdfs
 %defattr(-,root,root)
-%{_libdir}/libhdfs.*
+%{lib_dir}/libhdfs.*
 
 %files libhdfs-devel
-%{_includedir}/hdfs.h
-#%doc %{_docdir}/libhdfs-%{hadoop_version}
+%{include_dir}/hdfs.h
+#%doc %{doc_dir}/libhdfs-%{hadoop_version}
 
 %files libhdfspp
 %defattr(-,root,root)
-%{_libdir}/libhdfspp.*
+%{lib_dir}/libhdfspp.*
 
 %files libhdfspp-devel
-%{_includedir}/hdfspp
+%{include_dir}/hdfspp
 
 %files hdfs-fuse
 %defattr(-,root,root)
-%attr(0644,root,root) %config(noreplace) /etc/default/hadoop-fuse
-%attr(0755,root,root) %{lib_hadoop}/bin/fuse_dfs
-%attr(0755,root,root) %{bin_hadoop}/hadoop-fuse-dfs
+%attr(0644,root,root) %config(noreplace) %{etc_default}/hadoop-fuse
+%attr(0755,root,root) %{usr_lib_hadoop}/bin/fuse_dfs
+%attr(0755,root,root) %{bin_dir}/hadoop-fuse-dfs