Posted to commits@bigtop.apache.org by se...@apache.org on 2022/09/30 10:40:45 UTC

[bigtop] branch master updated: BIGTOP-3805: Support parent directory configuration for Spark rpm build script (#1002)

This is an automated email from the ASF dual-hosted git repository.

sekikn pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 54ffccc6 BIGTOP-3805: Support parent directory configuration for Spark rpm build script (#1002)
54ffccc6 is described below

commit 54ffccc6a6ad38c1752a96310f4fed1ac390d609
Author: Zhiguo Wu <wu...@apache.org>
AuthorDate: Fri Sep 30 18:40:39 2022 +0800

    BIGTOP-3805: Support parent directory configuration for Spark rpm build script (#1002)
    
    * BIGTOP-3805: Support parent directory configuration for Spark rpm build script
    
    * fix desc
---
 bigtop-packages/src/common/spark/install_spark.sh |  59 ++++++----
 bigtop-packages/src/rpm/spark/SPECS/spark.spec    | 125 ++++++++++++----------
 2 files changed, 110 insertions(+), 74 deletions(-)
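
For context: the new flags let a packager relocate every Spark install root under a common parent directory instead of hard-coding /usr, /var, and /etc. A minimal sketch of invoking the updated script by hand, mirroring the option set the spec passes below (the /opt/mydistro parent and the build/source paths are illustrative, not Bigtop defaults):

    bash install_spark.sh \
      --prefix=/tmp/spark-buildroot \
      --build-dir=build/spark \
      --source-dir=bigtop-packages/src/common/spark \
      --doc-dir=/opt/mydistro/usr/share/doc/spark \
      --lib-dir=/opt/mydistro/usr/lib/spark \
      --var-dir=/opt/mydistro/var/lib/spark \
      --bin-dir=/opt/mydistro/usr/bin \
      --man-dir=/opt/mydistro/usr/share/man \
      --etc-default=/opt/mydistro/etc/default \
      --etc-spark=/opt/mydistro/etc/spark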

diff --git a/bigtop-packages/src/common/spark/install_spark.sh b/bigtop-packages/src/common/spark/install_spark.sh
index fa4dd5cb..a8165c43 100644
--- a/bigtop-packages/src/common/spark/install_spark.sh
+++ b/bigtop-packages/src/common/spark/install_spark.sh
@@ -28,8 +28,11 @@ usage: $0 <options>
   Optional options:
      --doc-dir=DIR               path to install docs into [/usr/share/doc/spark]
      --lib-dir=DIR               path to install Spark home [/usr/lib/spark]
-     --installed-lib-dir=DIR     path where lib-dir will end up on target system
+     --var-dir=DIR               path to install Spark contents [/var/lib/spark]
      --bin-dir=DIR               path to install bins [/usr/bin]
+     --man-dir=DIR               path to install mans [/usr/share/man]
+     --etc-default=DIR           path to bigtop default dir [/etc/default]
+     --etc-spark=DIR             path to install spark conf [/etc/spark]
      ... [ see source for more similar options ]
   "
   exit 1
@@ -41,8 +44,11 @@ OPTS=$(getopt \
   -l 'prefix:' \
   -l 'doc-dir:' \
   -l 'lib-dir:' \
-  -l 'installed-lib-dir:' \
+  -l 'var-dir:' \
   -l 'bin-dir:' \
+  -l 'man-dir:' \
+  -l 'etc-default:' \
+  -l 'etc-spark:' \
   -l 'source-dir:' \
   -l 'build-dir:' -- "$@")
 
@@ -68,12 +74,21 @@ while true ; do
         --lib-dir)
         LIB_DIR=$2 ; shift 2
         ;;
-        --installed-lib-dir)
-        INSTALLED_LIB_DIR=$2 ; shift 2
+        --var-dir)
+        VAR_DIR=$2 ; shift 2
         ;;
         --bin-dir)
         BIN_DIR=$2 ; shift 2
         ;;
+        --man-dir)
+        MAN_DIR=$2 ; shift 2
+        ;;
+        --etc-default)
+        ETC_DEFAULT=$2 ; shift 2
+        ;;
+        --etc-spark)
+        ETC_SPARK=$2 ; shift 2
+        ;;
         --)
         shift ; break
         ;;
@@ -97,21 +112,26 @@ if [ -f "$SOURCE_DIR/bigtop.bom" ]; then
 fi
 
 DIST_DIR=${BUILD_DIR}/dist
-MAN_DIR=${MAN_DIR:-/usr/share/man/man1}
+
+MAN_DIR=${MAN_DIR:-/usr/share/man}/man1
 DOC_DIR=${DOC_DIR:-/usr/share/doc/spark}
 LIB_DIR=${LIB_DIR:-/usr/lib/spark}
-INSTALLED_LIB_DIR=${INSTALLED_LIB_DIR:-/usr/lib/spark}
+VAR_DIR=${VAR_DIR:-/var/lib/spark}
 BIN_DIR=${BIN_DIR:-/usr/bin}
-CONF_DIR=${CONF_DIR:-/etc/spark/conf.dist}
-PYSPARK_PYTHON=${PYSPARK_PYTHON:-python}
+ETC_DEFAULT=${ETC_DEFAULT:-/etc/default}
+
+ETC_SPARK=${ETC_SPARK:-/etc/spark}
+# No prefix
+NP_ETC_SPARK=/etc/spark
 
 install -d -m 0755 $PREFIX/$LIB_DIR
 install -d -m 0755 $PREFIX/$LIB_DIR/external/lib
 install -d -m 0755 $PREFIX/$LIB_DIR/yarn/lib
-install -d -m 0755 $PREFIX/$CONF_DIR
+install -d -m 0755 $PREFIX/$NP_ETC_SPARK
+install -d -m 0755 $PREFIX/$ETC_SPARK/conf.dist
 install -d -m 0755 $PREFIX/$DOC_DIR
 
-install -d -m 0755 $PREFIX/var/lib/spark/
+install -d -m 0755 $PREFIX/$VAR_DIR/
 install -d -m 0755 $PREFIX/var/log/spark/
 install -d -m 0755 $PREFIX/var/run/spark/
 install -d -m 0755 $PREFIX/var/run/spark/work/
@@ -134,10 +154,10 @@ copy_external_jars() {
 find_external_modules | copy_external_jars
 
 # Move the configuration files to the correct location
-mv $PREFIX/$LIB_DIR/conf/* $PREFIX/$CONF_DIR
-cp $SOURCE_DIR/spark-env.sh $PREFIX/$CONF_DIR
+mv $PREFIX/$LIB_DIR/conf/* $PREFIX/$ETC_SPARK/conf.dist
+cp $SOURCE_DIR/spark-env.sh $PREFIX/$ETC_SPARK/conf.dist
 rmdir $PREFIX/$LIB_DIR/conf
-ln -s /etc/spark/conf $PREFIX/$LIB_DIR/conf
+ln -s $NP_ETC_SPARK/conf $PREFIX/$LIB_DIR/conf
 
 # Copy in the wrappers
 install -d -m 0755 $PREFIX/$BIN_DIR
@@ -148,23 +168,23 @@ for wrap in bin/spark-class bin/spark-shell bin/spark-sql bin/spark-submit bin/f
 # Autodetect JAVA_HOME if not defined
 . /usr/lib/bigtop-utils/bigtop-detect-javahome
 
-exec $INSTALLED_LIB_DIR/$wrap "\$@"
+exec $LIB_DIR/$wrap "\$@"
 EOF
   chmod 755 $PREFIX/$BIN_DIR/$(basename $wrap)
 done
 
 ln -s /var/run/spark/work $PREFIX/$LIB_DIR/work
 
-rm -f ${PREFIX}/${INSTALLED_LIB_DIR}/python/.gitignore
+rm -f $PREFIX/$LIB_DIR/python/.gitignore
 cat > $PREFIX/$BIN_DIR/pyspark <<EOF
 #!/bin/bash
 
 # Autodetect JAVA_HOME if not defined
 . /usr/lib/bigtop-utils/bigtop-detect-javahome
 
-export PYSPARK_PYTHON=${PYSPARK_PYTHON}
+export PYSPARK_PYTHON=python
 
-exec $INSTALLED_LIB_DIR/bin/pyspark "\$@"
+exec $LIB_DIR/bin/pyspark "\$@"
 EOF
 chmod 755 $PREFIX/$BIN_DIR/pyspark
 
@@ -174,7 +194,7 @@ cat > $PREFIX/$BIN_DIR/spark-example <<EOF
 # Autodetect JAVA_HOME if not defined
 . /usr/lib/bigtop-utils/bigtop-detect-javahome
 
-exec $INSTALLED_LIB_DIR/bin/run-example "\$@"
+exec $LIB_DIR/bin/run-example "\$@"
 EOF
 chmod 755 $PREFIX/$BIN_DIR/spark-example
 
@@ -189,7 +209,6 @@ cp ${BUILD_DIR}/NOTICE ${PREFIX}/${LIB_DIR}/
 #   - https://github.com/apache/spark/pull/22840
 #   - https://issues.apache.org/jira/browse/SPARK-24654
 # Remember to fetch the new LICENSE-binary and licenses-binary files when upgrading Spark version.
-echo ${PWD}
 cp ${SOURCE_DIR}/LICENSE-binary ${PREFIX}/${LIB_DIR}/LICENSE
 cp -r ${SOURCE_DIR}/licenses-binary ${PREFIX}/${LIB_DIR}/licenses
 
@@ -205,4 +224,4 @@ pushd $PREFIX/$LIB_DIR/external/lib
 for j in $(ls *.jar); do
   ln -s $j $(echo $j | sed -n 's/\(.*\)\(_.\+\)\(.jar\)/\1\3/p')
 done
-popd
+popd
\ No newline at end of file
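
The spec change below rebases its path macros on a %{parent_dir} macro, which is supplied by the surrounding Bigtop build rather than defined in the spec itself. As a hypothetical standalone sketch, the macro could be set on the rpmbuild command line (path illustrative; Bigtop's Gradle packaging normally drives this step):

    rpmbuild -ba SPECS/spark.spec --define "parent_dir /opt/mydistro"

Note that the runtime pieces deliberately keep unprefixed locations: /var/log/spark, /var/run/spark, and the /etc/spark/conf alternatives link (the np_* macros) stay at their standard paths even when everything else is relocated under the parent directory.
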
diff --git a/bigtop-packages/src/rpm/spark/SPECS/spark.spec b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
index 27e7f034..36d767c1 100644
--- a/bigtop-packages/src/rpm/spark/SPECS/spark.spec
+++ b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
@@ -14,24 +14,29 @@
 # limitations under the License.
 
 %define spark_name spark
-%define lib_spark /usr/lib/%{spark_name}
-%define var_lib_spark /var/lib/%{spark_name}
-%define var_run_spark /var/run/%{spark_name}
-%define var_log_spark /var/log/%{spark_name}
-%define bin_spark /usr/lib/%{spark_name}/bin
-%define etc_spark /etc/%{spark_name}
-%define config_spark %{etc_spark}/conf
-%define bin /usr/bin
-%define man_dir /usr/share/man
+
+%define etc_default %{parent_dir}/etc/default
+
+%define usr_lib_spark %{parent_dir}/usr/lib/%{spark_name}
+%define var_lib_spark %{parent_dir}/var/lib/%{spark_name}
+%define etc_spark %{parent_dir}/etc/%{spark_name}
+
+%define bin_dir %{parent_dir}/%{_bindir}
+%define man_dir %{parent_dir}/%{_mandir}
+%define doc_dir %{parent_dir}/%{_docdir}
+
+# No prefix directory
+%define np_var_log_spark /var/log/%{spark_name}
+%define np_var_run_spark /var/run/%{spark_name}
+%define np_etc_spark /etc/%{spark_name}
+
 %define spark_services master worker history-server thriftserver
-%define lib_hadoop_client /usr/lib/hadoop/client
-%define lib_hadoop_yarn /usr/lib/hadoop-yarn/
 
 %if  %{?suse_version:1}0
-%define doc_spark %{_docdir}/spark
+%define doc_spark %{doc_dir}/spark
 %define alternatives_cmd update-alternatives
 %else
-%define doc_spark %{_docdir}/spark-%{spark_version}
+%define doc_spark %{doc_dir}/spark-%{spark_version}
 %define alternatives_cmd alternatives
 %endif
 
@@ -48,7 +53,7 @@ BuildArch: noarch
 Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 License: ASL 2.0
 Source0: %{spark_name}-%{spark_base_version}.tar.gz
-Source1: do-component-build 
+Source1: do-component-build
 Source2: install_%{spark_name}.sh
 Source3: spark-master.svc
 Source4: spark-worker.svc
@@ -173,16 +178,28 @@ PYSPARK_PYTHON=python2 bash $RPM_SOURCE_DIR/install_spark.sh \
           --build-dir=`pwd`         \
           --source-dir=$RPM_SOURCE_DIR \
           --prefix=$RPM_BUILD_ROOT  \
-          --doc-dir=%{doc_spark}
+          --doc-dir=%{doc_spark} \
+          --lib-dir=%{usr_lib_spark} \
+          --var-dir=%{var_lib_spark} \
+          --bin-dir=%{bin_dir} \
+          --man-dir=%{man_dir} \
+          --etc-default=%{etc_default} \
+          --etc-spark=%{etc_spark}
 %else
 bash $RPM_SOURCE_DIR/install_spark.sh \
           --build-dir=`pwd`         \
           --source-dir=$RPM_SOURCE_DIR \
           --prefix=$RPM_BUILD_ROOT  \
-          --doc-dir=%{doc_spark}
+          --doc-dir=%{doc_spark} \
+          --lib-dir=%{usr_lib_spark} \
+          --var-dir=%{var_lib_spark} \
+          --bin-dir=%{bin_dir} \
+          --man-dir=%{man_dir} \
+          --etc-default=%{etc_default} \
+          --etc-spark=%{etc_spark}
 %endif
 
-%__rm -f $RPM_BUILD_ROOT/%{lib_spark}/jars/hadoop-*.jar
+%__rm -f $RPM_BUILD_ROOT/%{usr_lib_spark}/jars/hadoop-*.jar
 
 for service in %{spark_services}
 do
@@ -196,11 +213,11 @@ getent group spark >/dev/null || groupadd -r spark
 getent passwd spark >/dev/null || useradd -c "Spark" -s /sbin/nologin -g spark -r -d %{var_lib_spark} spark 2> /dev/null || :
 
 %post
-%{alternatives_cmd} --install %{config_spark} %{spark_name}-conf %{config_spark}.dist 30
+%{alternatives_cmd} --install %{np_etc_spark}/conf %{spark_name}-conf %{etc_spark}/conf.dist 30
 
 %preun
 if [ "$1" = 0 ]; then
-        %{alternatives_cmd} --remove %{spark_name}-conf %{config_spark}.dist || :
+        %{alternatives_cmd} --remove %{spark_name}-conf %{etc_spark}/conf.dist || :
 fi
 
 for service in %{spark_services}; do
@@ -215,58 +232,58 @@ done
 #######################
 %files
 %defattr(-,root,root,755)
-%config(noreplace) %{config_spark}.dist
+%config(noreplace) %{etc_spark}/conf.dist
 %doc %{doc_spark}
-%{lib_spark}/LICENSE
-%{lib_spark}/NOTICE
-%{lib_spark}/README.md
-%{lib_spark}/RELEASE
-%{bin_spark}
-%exclude %{bin_spark}/pyspark
-%{lib_spark}/conf
-%{lib_spark}/data
-%{lib_spark}/examples
-%{lib_spark}/jars
-%exclude %{lib_spark}/jars/datanucleus-*.jar
-%{lib_spark}/licenses
-%{lib_spark}/sbin
-%{lib_spark}/work
-%{lib_spark}/kubernetes
-%{etc_spark}
+%{usr_lib_spark}/LICENSE
+%{usr_lib_spark}/NOTICE
+%{usr_lib_spark}/README.md
+%{usr_lib_spark}/RELEASE
+%{usr_lib_spark}/bin
+%exclude %{usr_lib_spark}/bin/pyspark
+%{usr_lib_spark}/conf
+%{usr_lib_spark}/data
+%{usr_lib_spark}/examples
+%{usr_lib_spark}/jars
+%exclude %{usr_lib_spark}/jars/datanucleus-*.jar
+%{usr_lib_spark}/licenses
+%{usr_lib_spark}/sbin
+%{usr_lib_spark}/work
+%{usr_lib_spark}/kubernetes
+%{np_etc_spark}
 %attr(0755,spark,spark) %{var_lib_spark}
-%attr(0755,spark,spark) %{var_run_spark}
-%attr(0755,spark,spark) %{var_log_spark}
-%{bin}/spark-*
-%{bin}/find-spark-home
-%exclude %{lib_spark}/R
-%exclude %{lib_spark}/bin/sparkR
-%exclude %{bin}/sparkR
+%attr(0755,spark,spark) %{np_var_run_spark}
+%attr(0755,spark,spark) %{np_var_log_spark}
+%{bin_dir}/spark-*
+%{bin_dir}/find-spark-home
+%exclude %{usr_lib_spark}/R
+%exclude %{usr_lib_spark}/bin/sparkR
+%exclude %{bin_dir}/sparkR
 
 %files -n spark-python
 %defattr(-,root,root,755)
-%attr(0755,root,root) %{bin}/pyspark
-%attr(0755,root,root) %{lib_spark}/bin/pyspark
-%{lib_spark}/python
+%attr(0755,root,root) %{bin_dir}/pyspark
+%attr(0755,root,root) %{usr_lib_spark}/bin/pyspark
+%{usr_lib_spark}/python
 
 %files -n spark-datanucleus
 %defattr(-,root,root,755)
-%{lib_spark}/jars/datanucleus-*.jar
-%{lib_spark}/yarn/lib/datanucleus-*.jar
+%{usr_lib_spark}/jars/datanucleus-*.jar
+%{usr_lib_spark}/yarn/lib/datanucleus-*.jar
 
 %files -n spark-external
 %defattr(-,root,root,755)
-%{lib_spark}/external
+%{usr_lib_spark}/external
 
 %files -n spark-yarn-shuffle
 %defattr(-,root,root,755)
-%{lib_spark}/yarn/spark-*-yarn-shuffle.jar
-%{lib_spark}/yarn/lib/spark-yarn-shuffle.jar
+%{usr_lib_spark}/yarn/spark-*-yarn-shuffle.jar
+%{usr_lib_spark}/yarn/lib/spark-yarn-shuffle.jar
 
 %files -n spark-sparkr
 %defattr(-,root,root,755)
-%{lib_spark}/R
-%{lib_spark}/bin/sparkR
-%{bin}/sparkR
+%{usr_lib_spark}/R
+%{usr_lib_spark}/bin/sparkR
+%{bin_dir}/sparkR
 
 %define service_macro() \
 %files -n %1 \