Posted to commits@bigtop.apache.org by gu...@apache.org on 2022/12/05 03:12:52 UTC

[bigtop] branch master updated: BIGTOP-3786: Improve the build scripts to append suffix to the rpm package name (#1058)

This is an automated email from the ASF dual-hosted git repository.

guyuqi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new 650b2e50 BIGTOP-3786: Improve the build scripts to append suffix to the rpm package name (#1058)
650b2e50 is described below

commit 650b2e5074761fe2b8693041d9f86320c040ed19
Author: Yu Hou <52...@qq.com>
AuthorDate: Mon Dec 5 11:12:46 2022 +0800

    BIGTOP-3786: Improve the build scripts to append suffix to the rpm package name (#1058)
    
    Append the suffix for Kafka, Solr, Hive, HBase, Tez, Flink, Spark, Zeppelin, and Hadoop
---
 bigtop-packages/src/rpm/flink/SPECS/flink.spec     |  17 ++--
 bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec   |  74 ++++++++-------
 bigtop-packages/src/rpm/hbase/SPECS/hbase.spec     |  47 ++++-----
 bigtop-packages/src/rpm/hive/SPECS/hive.spec       |  67 +++++++------
 bigtop-packages/src/rpm/kafka/SPECS/kafka.spec     |  10 +-
 bigtop-packages/src/rpm/solr/SPECS/solr.spec       |   7 +-
 bigtop-packages/src/rpm/spark/SPECS/spark.spec     | 105 ++++++++++++---------
 bigtop-packages/src/rpm/tez/SPECS/tez.spec         |  30 +++---
 .../src/rpm/zeppelin/SPECS/zeppelin.spec           |  47 ++++-----
 .../src/rpm/zookeeper/SPECS/zookeeper.spec         |  32 ++++---
 bigtop.bom                                         |  15 ++-
 packages.gradle                                    |  31 ++++--
 12 files changed, 275 insertions(+), 207 deletions(-)
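
With this change each component's spec defines a <component>_pkg_name macro (e.g. %define hadoop_pkg_name hadoop%{pkg_name_suffix}) used for the RPM Name: and cross-package Requires:, while the unsuffixed <component>_name macro continues to drive install paths, init scripts, and alternatives. The suffix comes from the new rpm_pkg_suffix entry in bigtop.bom ("_" plus base_version with dots replaced by underscores, i.e. "_3_2_0" for 3.2.0) and is applied only when the build is run with the pkgSuffix Gradle property; otherwise pkg_name_suffix expands to %{nil} and package names are unchanged. A rough usage sketch (the <component>-rpm task name is the standard Bigtop one, assumed here):

    # build Hadoop RPMs with the stack version appended to the package names
    ./gradlew hadoop-rpm -PpkgSuffix
    # produces e.g. hadoop_3_2_0-3.3.4-1...rpm instead of hadoop-3.3.4-1...rpm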

diff --git a/bigtop-packages/src/rpm/flink/SPECS/flink.spec b/bigtop-packages/src/rpm/flink/SPECS/flink.spec
index 21332ffb..c7243325 100644
--- a/bigtop-packages/src/rpm/flink/SPECS/flink.spec
+++ b/bigtop-packages/src/rpm/flink/SPECS/flink.spec
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 %define flink_name flink
+%define flink_pkg_name flink%{pkg_name_suffix}
 
 %define etc_default %{parent_dir}/etc/default
 
@@ -49,7 +50,7 @@
 %global initd_dir %{_sysconfdir}/rc.d
 %endif
 
-Name: %{flink_name}
+Name: %{flink_pkg_name}
 Version: %{flink_version}
 Release: %{flink_release}
 Summary: Apache Flink is an open source platform for distributed stream and batch data processing.
@@ -117,7 +118,7 @@ Apache Flink Task Manager service.
 ##############################################
 
 %prep
-%setup -n %{name}-%{flink_base_version}
+%setup -n %{flink_name}-%{flink_base_version}
 #BIGTOP_PATCH_COMMANDS
 
 %build
@@ -171,18 +172,18 @@ getent passwd flink >/dev/null || useradd -c "Flink" -s /sbin/nologin -g flink -
 
 %define service_macro() \
 %files %1 \
-%config(noreplace) %{initd_dir}/%{name}-%1 \
+%config(noreplace) %{initd_dir}/%{flink_name}-%1 \
 %post %1 \
-chkconfig --add %{name}-%1 \
+chkconfig --add %{flink_name}-%1 \
 %preun %1 \
-/sbin/service ${name}-%1 status > /dev/null 2>&1 \
+/sbin/service %{flink_name}-%1 status > /dev/null 2>&1 \
 if [ "$?" -eq 0 ]; then \
-  service ${name}-%1 stop > /dev/null 2>&1 \
-  chkconfig --del %{name}-%1 \
+  service %{flink_name}-%1 stop > /dev/null 2>&1 \
+  chkconfig --del %{flink_name}-%1 \
 fi \
 %postun %1 \
 if [ "$?" -ge 1 ]; then \
-   service %{name}-%1 condrestart > /dev/null 2>&1 || : \
+   service %{flink_name}-%1 condrestart > /dev/null 2>&1 || : \
 fi
 %service_macro jobmanager 
 %service_macro taskmanager
diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
index e57d98ec..8d372300 100644
--- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
+++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
@@ -23,6 +23,8 @@
 %undefine _auto_set_build_flags
 
 %define hadoop_name hadoop
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
+%define zookeeper_pkg_name zookeeper%{pkg_name_suffix}
 
 %define etc_default %{parent_dir}/etc/default
 
@@ -44,7 +46,7 @@
 %define doc_dir %{parent_dir}/%{_docdir}
 %define include_dir %{parent_dir}/%{_includedir}
 %define lib_dir %{parent_dir}/%{_libdir}
-%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
+%define doc_hadoop %{doc_dir}/%{hadoop_name}-%{hadoop_version}
 
 # No prefix directory
 %define np_var_log_yarn /var/log/%{hadoop_name}-yarn
@@ -93,7 +95,7 @@
     %{nil}
 
 %define netcat_package nc
-%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
+%define doc_hadoop %{doc_dir}/%{hadoop_name}-%{hadoop_version}
 %define alternatives_cmd alternatives
 %global initd_dir %{_sysconfdir}/rc.d/init.d
 %endif
@@ -113,14 +115,14 @@
     %{nil}
 
 %define netcat_package netcat-openbsd
-%define doc_hadoop %{doc_dir}/%{name}
+%define doc_hadoop %{doc_dir}/%{hadoop_name}
 %define alternatives_cmd update-alternatives
 %global initd_dir %{_sysconfdir}/rc.d
 %endif
 
 %if  0%{?mgaversion}
 %define netcat_package netcat-openbsd
-%define doc_hadoop %{doc_dir}/%{name}-%{hadoop_version}
+%define doc_hadoop %{doc_dir}/%{hadoop_name}-%{hadoop_version}
 %define alternatives_cmd update-alternatives
 %global initd_dir %{_sysconfdir}/rc.d/init.d
 %endif
@@ -138,16 +140,16 @@
 # BIGTOP-3359
 %define _build_id_links none
 
-Name: %{hadoop_name}
+Name: %{hadoop_pkg_name}
 Version: %{hadoop_version}
 Release: %{hadoop_release}
 Summary: Hadoop is a software platform for processing vast amounts of data
 License: ASL 2.0
 URL: http://hadoop.apache.org/core/
 Group: Development/Libraries
-Source0: %{name}-%{hadoop_base_version}.tar.gz
+Source0: %{hadoop_name}-%{hadoop_base_version}.tar.gz
 Source1: do-component-build
-Source2: install_%{name}.sh
+Source2: install_%{hadoop_name}.sh
 Source4: hadoop-fuse.default
 Source5: httpfs.default
 Source6: hadoop.1
@@ -178,7 +180,7 @@ Source31: kms.default
 #BIGTOP_PATCH_FILES
 Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id} -u -n)
 BuildRequires: fuse-devel, fuse
-Requires: coreutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service, bigtop-utils >= 0.7, zookeeper >= 3.4.0
+Requires: coreutils, /usr/sbin/useradd, /usr/sbin/usermod, /sbin/chkconfig, /sbin/service, bigtop-utils >= 0.7, %{zookeeper_pkg_name} >= 3.4.0
 Requires: psmisc, %{netcat_package}
 Requires: openssl-devel
 # Sadly, Sun/Oracle JDK in RPM form doesn't provide libjvm.so, which means we have
@@ -475,7 +477,7 @@ Hadoop Filesystem Library
 %package libhdfs-devel
 Summary: Development support for libhdfs
 Group: Development/Libraries
-Requires: hadoop = %{version}-%{release}, hadoop-libhdfs = %{version}-%{release}
+Requires: %{name} = %{version}-%{release}, %{name}-libhdfs = %{version}-%{release}
 
 %description libhdfs-devel
 Includes examples and header files for accessing HDFS from C
@@ -519,7 +521,7 @@ These projects (enumerated below) allow HDFS to be mounted (on most flavors of U
 
 
 %prep
-%setup -n %{name}-%{hadoop_base_version}-src
+%setup -n %{hadoop_name}-%{hadoop_base_version}-src
 
 #BIGTOP_PATCH_COMMANDS
 %build
@@ -577,14 +579,14 @@ env HADOOP_VERSION=%{hadoop_base_version} bash %{SOURCE2} \
 
 # Install top level /etc/default files
 # %__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default}
-%__cp $RPM_SOURCE_DIR/%{name}-fuse.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-fuse
+%__cp $RPM_SOURCE_DIR/%{hadoop_name}-fuse.default $RPM_BUILD_ROOT/%{etc_default}/%{hadoop_name}-fuse
 
 # Generate the init.d scripts
 for service in %{hadoop_services}
 do
-       bash %{SOURCE11} $RPM_SOURCE_DIR/%{name}-${service}.svc rpm $RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
-       cp $RPM_SOURCE_DIR/${service/-*/}.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-${service}
-       chmod 644 $RPM_BUILD_ROOT/%{etc_default}/%{name}-${service}
+       bash %{SOURCE11} $RPM_SOURCE_DIR/%{hadoop_name}-${service}.svc rpm $RPM_BUILD_ROOT/%{initd_dir}/%{hadoop_name}-${service}
+       cp $RPM_SOURCE_DIR/${service/-*/}.default $RPM_BUILD_ROOT/%{etc_default}/%{hadoop_name}-${service}
+       chmod 644 $RPM_BUILD_ROOT/%{etc_default}/%{hadoop_name}-${service}
 done
 
 # Install security limits
@@ -640,39 +642,39 @@ getent group mapred >/dev/null  || groupadd -r mapred
 getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Hadoop MapReduce" --shell /bin/bash -M -r -g mapred -G hadoop --home %{var_lib_mapreduce} mapred
 
 %post
-%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{name}-conf %{etc_hadoop}/conf.empty 10
+%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{hadoop_name}-conf %{etc_hadoop}/conf.empty 10
 
 %post httpfs
-chkconfig --add %{name}-httpfs
+chkconfig --add %{hadoop_name}-httpfs
 
 %post kms
-chkconfig --add %{name}-kms
+chkconfig --add %{hadoop_name}-kms
 
 %preun
 if [ "$1" = 0 ]; then
-  %{alternatives_cmd} --remove %{name}-conf %{etc_hadoop}/conf.empty || :
+  %{alternatives_cmd} --remove %{hadoop_name}-conf %{etc_hadoop}/conf.empty || :
 fi
 
 %preun httpfs
 if [ $1 = 0 ]; then
-  service %{name}-httpfs stop > /dev/null 2>&1
-  chkconfig --del %{name}-httpfs
+  service %{hadoop_name}-httpfs stop > /dev/null 2>&1
+  chkconfig --del %{hadoop_name}-httpfs
 fi
 
 %postun httpfs
 if [ $1 -ge 1 ]; then
-  service %{name}-httpfs condrestart >/dev/null 2>&1
+  service %{hadoop_name}-httpfs condrestart >/dev/null 2>&1
 fi
 
 %preun kms
 if [ $1 = 0 ]; then
-  service %{name}-kms stop > /dev/null 2>&1
-  chkconfig --del %{name}-kms
+  service %{hadoop_name}-kms stop > /dev/null 2>&1
+  chkconfig --del %{hadoop_name}-kms
 fi
 
 %postun kms
 if [ $1 -ge 1 ]; then
-  service %{name}-kms condrestart >/dev/null 2>&1
+  service %{hadoop_name}-kms condrestart >/dev/null 2>&1
 fi
 
 %files yarn
@@ -764,11 +766,11 @@ fi
 %files httpfs
 %defattr(-,root,root)
 
-%config(noreplace) %{etc_default}/%{name}-httpfs
+%config(noreplace) %{etc_default}/%{hadoop_name}-httpfs
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-log4j.properties
 %config(noreplace) %{etc_hadoop}/conf.empty/httpfs-site.xml
-%{initd_dir}/%{name}-httpfs
+%{initd_dir}/%{hadoop_name}-httpfs
 %attr(0775,httpfs,httpfs) %{np_var_run_httpfs}
 %attr(0775,httpfs,httpfs) %{np_var_log_httpfs}
 %attr(0775,httpfs,httpfs) %{var_lib_httpfs}
@@ -779,8 +781,8 @@ fi
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-env.sh
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-log4j.properties
 %config(noreplace) %{etc_hadoop}/conf.empty/kms-site.xml
-%config(noreplace) %{etc_default}/%{name}-kms
-%{initd_dir}/%{name}-kms
+%config(noreplace) %{etc_default}/%{hadoop_name}-kms
+%{initd_dir}/%{hadoop_name}-kms
 %attr(0775,kms,kms) %{np_var_run_kms}
 %attr(0775,kms,kms) %{np_var_log_kms}
 %attr(0775,kms,kms) %{var_lib_kms}
@@ -789,19 +791,19 @@ fi
 %define service_macro() \
 %files %1 \
 %defattr(-,root,root) \
-%{initd_dir}/%{name}-%1 \
-%config(noreplace) %{etc_default}/%{name}-%1 \
+%{initd_dir}/%{hadoop_name}-%1 \
+%config(noreplace) %{etc_default}/%{hadoop_name}-%1 \
 %post %1 \
-chkconfig --add %{name}-%1 \
+chkconfig --add %{hadoop_name}-%1 \
 \
 %preun %1 \
 if [ $1 = 0 ]; then \
-  service %{name}-%1 stop > /dev/null 2>&1 \
-  chkconfig --del %{name}-%1 \
+  service %{hadoop_name}-%1 stop > /dev/null 2>&1 \
+  chkconfig --del %{hadoop_name}-%1 \
 fi \
 %postun %1 \
 if [ $1 -ge 1 ]; then \
-  service %{name}-%1 condrestart >/dev/null 2>&1 \
+  service %{hadoop_name}-%1 condrestart >/dev/null 2>&1 \
 fi
 
 %service_macro hdfs-namenode
@@ -819,11 +821,11 @@ fi
 
 # Pseudo-distributed Hadoop installation
 %post conf-pseudo
-%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{name}-conf %{etc_hadoop}/conf.pseudo 30
+%{alternatives_cmd} --install %{np_etc_hadoop}/conf %{hadoop_name}-conf %{etc_hadoop}/conf.pseudo 30
 
 %preun conf-pseudo
 if [ "$1" = 0 ]; then
-        %{alternatives_cmd} --remove %{name}-conf %{etc_hadoop}/conf.pseudo
+        %{alternatives_cmd} --remove %{hadoop_name}-conf %{etc_hadoop}/conf.pseudo
 fi
 
 %files conf-pseudo
diff --git a/bigtop-packages/src/rpm/hbase/SPECS/hbase.spec b/bigtop-packages/src/rpm/hbase/SPECS/hbase.spec
index eaf1c700..02a09ec1 100644
--- a/bigtop-packages/src/rpm/hbase/SPECS/hbase.spec
+++ b/bigtop-packages/src/rpm/hbase/SPECS/hbase.spec
@@ -13,11 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+%define hbase_name hbase
+%define hbase_pkg_name hbase%{pkg_name_suffix}
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
+%define zookeeper_pkg_name zookeeper%{pkg_name_suffix}
+
 %define etc_default %{parent_dir}/etc/default
 
-%define usr_lib_hbase %{parent_dir}/usr/lib/%{name}
-%define var_lib_hbase %{parent_dir}/var/lib/%{name}
-%define etc_hbase %{parent_dir}/etc/%{name}
+%define usr_lib_hbase %{parent_dir}/usr/lib/%{hbase_name}
+%define var_lib_hbase %{parent_dir}/var/lib/%{hbase_name}
+%define etc_hbase %{parent_dir}/etc/%{hbase_name}
 
 %define usr_lib_hadoop %{parent_dir}/usr/lib/hadoop
 %define usr_lib_zookeeper %{parent_dir}/usr/lib/zookeeper
@@ -27,9 +32,9 @@
 %define doc_dir %{parent_dir}/%{_docdir}
 
 # No prefix directory
-%define np_var_log_hbase /var/log/%{name}
-%define np_var_run_hbase /var/run/%{name}
-%define np_etc_hbase /etc/%{name}
+%define np_var_log_hbase /var/log/%{hbase_name}
+%define np_var_run_hbase /var/run/%{hbase_name}
+%define np_etc_hbase /etc/%{hbase_name}
 
 %define hbase_username hbase
 %define hbase_services master regionserver thrift thrift2 rest
@@ -52,7 +57,7 @@
     /usr/lib/rpm/brp-compress ; \
     %{nil}
 
-%define doc_hbase %{doc_dir}/%{name}
+%define doc_hbase %{doc_dir}/%{hbase_name}
 %global initd_dir %{_sysconfdir}/rc.d
 %define alternatives_cmd update-alternatives
 
@@ -76,7 +81,7 @@
 %endif
 
 
-%define doc_hbase %{doc_dir}/%{name}-%{hbase_version}
+%define doc_hbase %{doc_dir}/%{hbase_name}-%{hbase_version}
 %global initd_dir %{_sysconfdir}/rc.d/init.d
 %define alternatives_cmd alternatives
 
@@ -85,7 +90,7 @@
 # Disable debuginfo package
 %define debug_package %{nil}
 
-Name: hbase
+Name: %{hbase_pkg_name}
 Version: %{hbase_version}
 Release: %{hbase_release}
 Summary: HBase is the Hadoop database. Use it when you need random, realtime read/write access to your Big Data. This project's goal is the hosting of very large tables -- billions of rows X millions of columns -- atop clusters of commodity hardware. 
@@ -93,7 +98,7 @@ URL: http://hbase.apache.org/
 Group: Development/Libraries
 Buildroot: %{_topdir}/INSTALL/%{name}-%{version}
 License: ASL 2.0
-Source0: %{name}-%{hbase_base_version}.tar.gz
+Source0: %{hbase_name}-%{hbase_base_version}.tar.gz
 Source1: do-component-build
 Source2: install_hbase.sh
 Source3: hbase.svc
@@ -102,7 +107,7 @@ Source6: hbase.nofiles.conf
 Source7: regionserver-init.d.tpl
 #BIGTOP_PATCH_FILES
 Requires: coreutils, /usr/sbin/useradd, /sbin/chkconfig, /sbin/service
-Requires: hadoop-client, zookeeper >= 3.3.1, bigtop-utils >= 0.7
+Requires: %{hadoop_pkg_name}-client, %{zookeeper_pkg_name} >= 3.3.1, bigtop-utils >= 0.7
 
 %if  0%{?mgaversion}
 Requires: bsh-utils
@@ -272,7 +277,7 @@ Requires: /lib/lsb/init-functions
 The Apache HBase REST gateway
 
 %prep
-%setup -n %{name}-%{hbase_base_version}
+%setup -n %{hbase_name}-%{hbase_base_version}
 
 #BIGTOP_PATCH_COMMANDS
 
@@ -297,7 +302,7 @@ bash %{SOURCE2} \
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default}/
 
 %__install -d -m 0755 $RPM_BUILD_ROOT/etc/security/limits.d
-%__install -m 0644 %{SOURCE6} $RPM_BUILD_ROOT/etc/security/limits.d/%{name}.nofiles.conf
+%__install -m 0644 %{SOURCE6} $RPM_BUILD_ROOT/etc/security/limits.d/%{hbase_name}.nofiles.conf
 
 %__install -d  -m 0755  %{buildroot}/%{np_var_log_hbase}
 ln -s %{np_var_log_hbase} %{buildroot}/%{usr_lib_hbase}/logs
@@ -309,7 +314,7 @@ ln -s %{np_var_run_hbase} %{buildroot}/%{usr_lib_hbase}/pids
 
 for service in %{hbase_services}
 do
-    init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
+    init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{hbase_name}-${service}
     if [[ "$service" = "regionserver" ]] ; then
         # Region servers start from a different template that allows
         # them to run multiple concurrent instances of the daemon
@@ -351,11 +356,11 @@ getent group hbase 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hbase
 getent passwd hbase 2>&1 > /dev/null || /usr/sbin/useradd -c "HBase" -s /sbin/nologin -g hbase -r -d /var/lib/hbase hbase 2> /dev/null || :
 
 %post
-%{alternatives_cmd} --install %{np_etc_hbase}/conf %{name}-conf %{etc_hbase}/conf.dist 30
+%{alternatives_cmd} --install %{np_etc_hbase}/conf %{hbase_name}-conf %{etc_hbase}/conf.dist 30
 
 %preun
 if [ "$1" = 0 ]; then
-        %{alternatives_cmd} --remove %{name}-conf %{etc_hbase}/conf.dist || :
+        %{alternatives_cmd} --remove %{hbase_name}-conf %{etc_hbase}/conf.dist || :
 fi
 
 
@@ -387,18 +392,18 @@ fi
 
 %define service_macro() \
 %files %1 \
-%attr(0755,root,root)/%{initd_dir}/%{name}-%1 \
+%attr(0755,root,root)/%{initd_dir}/%{hbase_name}-%1 \
 %post %1 \
-chkconfig --add %{name}-%1 \
+chkconfig --add %{hbase_name}-%1 \
 \
 %preun %1 \
 if [ $1 = 0 ] ; then \
-        service %{name}-%1 stop > /dev/null 2>&1 \
-        chkconfig --del %{name}-%1 \
+        service %{hbase_name}-%1 stop > /dev/null 2>&1 \
+        chkconfig --del %{hbase_name}-%1 \
 fi \
 %postun %1 \
 if [ $1 -ge 1 ]; then \
-        service %{name}-%1 condrestart >/dev/null 2>&1 \
+        service %{hbase_name}-%1 condrestart >/dev/null 2>&1 \
 fi
 %service_macro master
 %service_macro thrift
diff --git a/bigtop-packages/src/rpm/hive/SPECS/hive.spec b/bigtop-packages/src/rpm/hive/SPECS/hive.spec
index 99fce1d9..3919a2db 100644
--- a/bigtop-packages/src/rpm/hive/SPECS/hive.spec
+++ b/bigtop-packages/src/rpm/hive/SPECS/hive.spec
@@ -15,13 +15,18 @@
 
 %define hadoop_username hadoop
 
+%define hive_name hive
+%define hive_pkg_name hive%{pkg_name_suffix}
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
+%define zookeeper_pkg_name zookeeper%{pkg_name_suffix}
+
 %define etc_default %{parent_dir}/etc/default
 
-%define usr_lib_hive %{parent_dir}/usr/lib/%{name}
-%define usr_lib_hcatalog %{parent_dir}/usr/lib/%{name}-hcatalog
-%define var_lib_hive %{parent_dir}/var/lib/%{name}
-%define var_lib_hcatalog %{parent_dir}/var/lib/%{name}-hcatalog
-%define etc_hive %{parent_dir}/etc/%{name}
+%define usr_lib_hive %{parent_dir}/usr/lib/%{hive_name}
+%define usr_lib_hcatalog %{parent_dir}/usr/lib/%{hive_name}-hcatalog
+%define var_lib_hive %{parent_dir}/var/lib/%{hive_name}
+%define var_lib_hcatalog %{parent_dir}/var/lib/%{hive_name}-hcatalog
+%define etc_hive %{parent_dir}/etc/%{hive_name}
 
 %define usr_lib_zookeeper %{parent_dir}/usr/lib/zookeeper
 %define usr_lib_hbase %{parent_dir}/usr/lib/hbase
@@ -31,10 +36,10 @@
 %define doc_dir %{parent_dir}/%{_docdir}
 
 # No prefix directory
-%define np_var_log_hive /var/log/%{name}
-%define np_var_run_hive /var/run/%{name}
-%define np_var_log_hcatalog /var/log/%{name}-hcatalog
-%define np_etc_hive /etc/%{name}
+%define np_var_log_hive /var/log/%{hive_name}
+%define np_var_run_hive /var/run/%{hive_name}
+%define np_var_log_hcatalog /var/log/%{hive_name}-hcatalog
+%define np_etc_hive /etc/%{hive_name}
 
 %define hive_config_virtual hive_active_configuration
 %define hive_services hive-metastore hive-server2 hive-hcatalog-server hive-webhcat-server
@@ -43,7 +48,7 @@
 
 %if  %{!?suse_version:1}0
 
-%define doc_hive %{doc_dir}/%{name}-%{hive_version}
+%define doc_hive %{doc_dir}/%{hive_name}-%{hive_version}
 %define alternatives_cmd alternatives
 
 %global initd_dir %{_sysconfdir}/rc.d/init.d
@@ -55,7 +60,7 @@
 %define suse_check \# Define an empty suse_check for compatibility with older sles
 %endif
 
-%define doc_hive %{doc_dir}/%{name}
+%define doc_hive %{doc_dir}/%{hive_name}
 %define alternatives_cmd update-alternatives
 
 %global initd_dir %{_sysconfdir}/rc.d
@@ -68,7 +73,7 @@
 %endif
 
 
-Name: hive
+Name: %{hive_pkg_name}
 Version: %{hive_version}
 Release: %{hive_release}
 Summary: Hive is a data warehouse infrastructure built on top of Hadoop
@@ -77,7 +82,7 @@ URL: http://hive.apache.org/
 Group: Development/Libraries
 Buildroot: %{_topdir}/INSTALL/%{name}-%{version}
 BuildArch: noarch
-Source0: apache-%{name}-%{hive_base_version}-src.tar.gz
+Source0: apache-%{hive_name}-%{hive_base_version}-src.tar.gz
 Source1: do-component-build
 Source2: install_hive.sh
 Source3: init.d.tmpl
@@ -95,8 +100,8 @@ Source16: hive-hcatalog-server.default
 Source17: hive-webhcat-server.default
 Source18: bigtop.bom
 #BIGTOP_PATCH_FILES
-Requires: hadoop-client, bigtop-utils >= 0.7, zookeeper, hive-jdbc = %{version}-%{release}
-Conflicts: hadoop-hive
+Requires: %{hadoop_pkg_name}-client, bigtop-utils >= 0.7, %{zookeeper_pkg_name}, %{name}-jdbc = %{version}-%{release}
+Conflicts: %{hadoop_pkg_name}-hive
 Obsoletes: %{name}-webinterface
 
 %description 
@@ -147,7 +152,7 @@ This optional package hosts a metadata server for Hive clients across a network
 %package hbase
 Summary: Provides integration between Apache HBase and Apache Hive
 Group: Development/Libraries
-Requires: hive = %{version}-%{release}, hbase
+Requires: %{name} = %{version}-%{release}, hbase
 
 %description hbase
 This optional package provides integration between Apache HBase and Apache Hive
@@ -155,7 +160,7 @@ This optional package provides integration between Apache HBase and Apache Hive
 %package jdbc
 Summary: Provides libraries necessary to connect to Apache Hive via JDBC
 Group: Development/Libraries
-Requires: hadoop-client
+Requires: %{hadoop_pkg_name}-client
 
 %description jdbc
 This package provides libraries necessary to connect to Apache Hive via JDBC
@@ -163,7 +168,7 @@ This package provides libraries necessary to connect to Apache Hive via JDBC
 %package hcatalog
 Summary: Apache Hcatalog is a data warehouse infrastructure built on top of Hadoop
 Group: Development/Libraries
-Requires: hadoop, hive, bigtop-utils >= 0.7
+Requires: %{hadoop_pkg_name}, %{name}, bigtop-utils >= 0.7
 
 %description hcatalog
 Apache HCatalog is a table and storage management service for data created using Apache Hadoop.
@@ -240,7 +245,7 @@ Requires: /lib/lsb/init-functions
 Init scripts for WebHcat server.
 
 %prep
-%setup -q -n apache-%{name}-%{hive_base_version}-src
+%setup -q -n apache-%{hive_name}-%{hive_base_version}-src
 
 #BIGTOP_PATCH_COMMANDS
 
@@ -277,10 +282,10 @@ cp $RPM_SOURCE_DIR/hive-site.xml .
 
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{initd_dir}/
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{etc_default}/
-%__install -m 0644 $RPM_SOURCE_DIR/hive-metastore.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-metastore
-%__install -m 0644 $RPM_SOURCE_DIR/hive-server2.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-server2
-%__install -m 0644 $RPM_SOURCE_DIR/hive-hcatalog-server.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-hcatalog-server
-%__install -m 0644 $RPM_SOURCE_DIR/hive-webhcat-server.default $RPM_BUILD_ROOT/%{etc_default}/%{name}-webhcat-server
+%__install -m 0644 $RPM_SOURCE_DIR/hive-metastore.default $RPM_BUILD_ROOT/%{etc_default}/%{hive_name}-metastore
+%__install -m 0644 $RPM_SOURCE_DIR/hive-server2.default $RPM_BUILD_ROOT/%{etc_default}/%{hive_name}-server2
+%__install -m 0644 $RPM_SOURCE_DIR/hive-hcatalog-server.default $RPM_BUILD_ROOT/%{etc_default}/%{hive_name}-hcatalog-server
+%__install -m 0644 $RPM_SOURCE_DIR/hive-webhcat-server.default $RPM_BUILD_ROOT/%{etc_default}/%{hive_name}-webhcat-server
 
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_log_hive}
 %__install -d -m 0755 $RPM_BUILD_ROOT/%{np_var_run_hive}
@@ -309,7 +314,7 @@ getent passwd hive >/dev/null || useradd -c "Hive" -s /sbin/nologin -g hive -r -
 %post
 
 # Install config alternatives
-%{alternatives_cmd} --install %{np_etc_hive}/conf %{name}-conf %{etc_hive}/conf.dist 30
+%{alternatives_cmd} --install %{np_etc_hive}/conf %{hive_name}-conf %{etc_hive}/conf.dist 30
 
 
 # Upgrade
@@ -323,7 +328,7 @@ fi
 
 %preun
 if [ "$1" = 0 ]; then
-  %{alternatives_cmd} --remove %{name}-conf %{etc_hive}/conf.dist || :
+  %{alternatives_cmd} --remove %{hive_name}-conf %{etc_hive}/conf.dist || :
 fi
 
 
@@ -401,19 +406,19 @@ fi
 
 %define service_macro() \
 %files %1 \
-%attr(0755,root,root)/%{initd_dir}/%{name}-%1 \
-%config(noreplace) %{etc_default}/%{name}-%1 \
+%attr(0755,root,root)/%{initd_dir}/%{hive_name}-%1 \
+%config(noreplace) %{etc_default}/%{hive_name}-%1 \
 %post %1 \
-chkconfig --add %{name}-%1 \
+chkconfig --add %{hive_name}-%1 \
 \
 %preun %1 \
 if [ "$1" = 0 ] ; then \
-        service %{name}-%1 stop > /dev/null \
-        chkconfig --del %{name}-%1 \
+        service %{hive_name}-%1 stop > /dev/null \
+        chkconfig --del %{hive_name}-%1 \
 fi \
 %postun %1 \
 if [ $1 -ge 1 ]; then \
-   service %{name}-%1 condrestart >/dev/null 2>&1 || : \
+   service %{hive_name}-%1 condrestart >/dev/null 2>&1 || : \
 fi
 %service_macro server2
 %service_macro metastore
diff --git a/bigtop-packages/src/rpm/kafka/SPECS/kafka.spec b/bigtop-packages/src/rpm/kafka/SPECS/kafka.spec
index a5442f8e..3c8f3b35 100644
--- a/bigtop-packages/src/rpm/kafka/SPECS/kafka.spec
+++ b/bigtop-packages/src/rpm/kafka/SPECS/kafka.spec
@@ -14,6 +14,8 @@
 # limitations under the License.
 
 %define kafka_name kafka
+%define kafka_pkg_name kafka%{pkg_name_suffix}
+%define zookeeper_pkg_name zookeeper%{pkg_name_suffix}
 
 %define etc_default %{parent_dir}/etc/default
 
@@ -59,14 +61,14 @@
 # disable repacking jars
 %define __os_install_post %{nil}
 
-Name: kafka
+Name: %{kafka_pkg_name}
 Version: %{kafka_version}
 Release: %{kafka_release}
 Summary: Apache Kafka is publish-subscribe messaging rethought as a distributed commit log.
 URL: http://kafka.apache.org/
 Group: Development/Libraries
 BuildArch: noarch
-Buildroot: %(mktemp -ud %{_tmppath}/%{kafka_name}-%{version}-%{release}-XXXXXX)
+Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 License: ASL 2.0
 Source0: %{kafka_name}-%{kafka_base_version}.tar.gz
 Source1: do-component-build
@@ -75,7 +77,7 @@ Source3: kafka-server.svc
 Source4: init.d.tmpl
 Source6: kafka.default
 #BIGTOP_PATCH_FILES
-Requires: zookeeper
+Requires: %{zookeeper_pkg_name}
 Requires: bigtop-utils >= 0.7
 Requires(preun): /sbin/service
 
@@ -89,7 +91,7 @@ larger than the capability of any single machine and to allow clusters of co-ord
 %package server
 Summary: Server for kafka
 Group: System/Daemons
-Requires: kafka = %{version}-%{release}
+Requires: %{name} = %{version}-%{release}
 
 # CentOS 5 does not have any dist macro
 # So I will suppose anything that is not Mageia or a SUSE will be a RHEL/CentOS/Fedora
diff --git a/bigtop-packages/src/rpm/solr/SPECS/solr.spec b/bigtop-packages/src/rpm/solr/SPECS/solr.spec
index 0524f085..c889f8c5 100644
--- a/bigtop-packages/src/rpm/solr/SPECS/solr.spec
+++ b/bigtop-packages/src/rpm/solr/SPECS/solr.spec
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 %define solr_name solr
+%define solr_pkg_name %{solr_name}%{pkg_name_suffix}
 
 %define etc_default %{parent_dir}/etc/default
 
@@ -30,7 +31,7 @@
 %define np_var_run_solr /var/run/%{solr_name}
 %define np_etc_solr /etc/%{solr_name}
 
-%define svc_solr %{name}-server
+%define svc_solr %{solr_name}-server
 %define tomcat_deployment_solr %{etc_solr}/tomcat-conf
 
 %if  %{?suse_version:1}0
@@ -50,7 +51,7 @@
 # disable repacking jars
 %define __os_install_post %{nil}
 
-Name: solr
+Name: %{solr_pkg_name}
 Version: %{solr_version}
 Release: %{solr_release}
 Summary: Apache Solr is the popular, blazing fast open source enterprise search platform
@@ -61,7 +62,7 @@ Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 License: ASL 2.0
 Source0: solr-%{solr_base_version}-src.tgz
 Source1: do-component-build 
-Source2: install_%{name}.sh
+Source2: install_%{solr_name}.sh
 Source3: solr.default
 Source4: solr-server.init
 Source5: solrctl.sh
diff --git a/bigtop-packages/src/rpm/spark/SPECS/spark.spec b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
index 36d767c1..3610a49f 100644
--- a/bigtop-packages/src/rpm/spark/SPECS/spark.spec
+++ b/bigtop-packages/src/rpm/spark/SPECS/spark.spec
@@ -14,6 +14,8 @@
 # limitations under the License.
 
 %define spark_name spark
+%define spark_pkg_name spark%{pkg_name_suffix}
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
 
 %define etc_default %{parent_dir}/etc/default
 
@@ -43,7 +45,7 @@
 # disable repacking jars
 %define __os_install_post %{nil}
 
-Name: spark-core
+Name: %{spark_pkg_name}
 Version: %{spark_version}
 Release: %{spark_release}
 Summary: Lightning-Fast Cluster Computing
@@ -62,7 +64,7 @@ Source7: spark-history-server.svc
 Source8: spark-thriftserver.svc
 Source9: bigtop.bom
 #BIGTOP_PATCH_FILES
-Requires: bigtop-utils >= 0.7, hadoop-client, hadoop-yarn
+Requires: bigtop-utils >= 0.7, %{hadoop_pkg_name}-client, %{hadoop_pkg_name}-yarn
 Requires(preun): /sbin/service
 
 %global initd_dir %{_sysconfdir}/init.d
@@ -87,78 +89,86 @@ written in Scala, a high-level language for the JVM, and exposes a clean
 language-integrated syntax that makes it easy to write parallel jobs.
 Spark runs on top of the Apache Mesos cluster manager.
 
-%package -n spark-master
+%package -n %{spark_pkg_name}-core
+Summary: Spark core
+Group: Development/Libraries
+Requires: %{spark_pkg_name} = %{version}-%{release}
+
+%description -n %{spark_pkg_name}-core
+Spark core
+
+%package -n %{spark_pkg_name}-master
 Summary: Server for Spark master
 Group: Development/Libraries
-Requires: spark-core = %{version}-%{release}
+Requires: %{spark_pkg_name}-core = %{version}-%{release}
 
-%description -n spark-master
+%description -n %{spark_pkg_name}-master
 Server for Spark master
 
-%package -n spark-worker
+%package -n %{spark_pkg_name}-worker
 Summary: Server for Spark worker
 Group: Development/Libraries
-Requires: spark-core = %{version}-%{release}
+Requires: %{spark_pkg_name}-core = %{version}-%{release}
 
-%description -n spark-worker
+%description -n %{spark_pkg_name}-worker
 Server for Spark worker
 
-%package -n spark-python
+%package -n %{spark_pkg_name}-python
 Summary: Python client for Spark
 Group: Development/Libraries
 %if 0%{?rhel} >= 8
-Requires: spark-core = %{version}-%{release}, python2
+Requires: %{spark_pkg_name}-core = %{version}-%{release}, python2
 %else
-Requires: spark-core = %{version}-%{release}, python
+Requires: %{spark_pkg_name}-core = %{version}-%{release}, python
 %endif
 
-%description -n spark-python
+%description -n %{spark_pkg_name}-python
 Includes PySpark, an interactive Python shell for Spark, and related libraries
 
-%package -n spark-history-server
+%package -n %{spark_pkg_name}-history-server
 Summary: History server for Apache Spark
 Group: Development/Libraries
-Requires: spark-core = %{version}-%{release}
+Requires: %{spark_pkg_name}-core = %{version}-%{release}
 
-%description -n spark-history-server
+%description -n %{spark_pkg_name}-history-server
 History server for Apache Spark
 
-%package -n spark-thriftserver
+%package -n %{spark_pkg_name}-thriftserver
 Summary: Thrift server for Spark SQL
 Group: Development/Libraries
-Requires: spark-core = %{version}-%{release}
+Requires: %{spark_pkg_name}-core = %{version}-%{release}
 
-%description -n spark-thriftserver
+%description -n %{spark_pkg_name}-thriftserver
 Thrift server for Spark SQL
 
-%package -n spark-datanucleus
+%package -n %{spark_pkg_name}-datanucleus
 Summary: DataNucleus libraries for Apache Spark
 Group: Development/Libraries
 
-%description -n spark-datanucleus
+%description -n %{spark_pkg_name}-datanucleus
 DataNucleus libraries used by Spark SQL with Hive Support
 
-%package -n spark-external
+%package -n %{spark_pkg_name}-external
 Summary: External libraries for Apache Spark
 Group: Development/Libraries
 
-%description -n spark-external
+%description -n %{spark_pkg_name}-external
 External libraries built for Apache Spark but not included in the main
 distribution (e.g., external streaming libraries)
 
-%package -n spark-yarn-shuffle
+%package -n %{spark_pkg_name}-yarn-shuffle
 Summary: Spark YARN Shuffle Service
 Group: Development/Libraries
 
-%description -n spark-yarn-shuffle
+%description -n %{spark_pkg_name}-yarn-shuffle
 Spark YARN Shuffle Service
 
-%package -n spark-sparkr
+%package -n %{spark_pkg_name}-sparkr
 Summary: R package for Apache Spark
 Group: Development/Libraries
-Requires: spark-core = %{version}-%{release}, R
+Requires: %{spark_pkg_name}-core = %{version}-%{release}, R
 
-%description -n spark-sparkr
+%description -n %{spark_pkg_name}-sparkr
 SparkR is an R package that provides a light-weight frontend to use Apache Spark from R.
 
 %prep
@@ -236,7 +246,6 @@ done
 %doc %{doc_spark}
 %{usr_lib_spark}/LICENSE
 %{usr_lib_spark}/NOTICE
-%{usr_lib_spark}/README.md
 %{usr_lib_spark}/RELEASE
 %{usr_lib_spark}/bin
 %exclude %{usr_lib_spark}/bin/pyspark
@@ -259,48 +268,52 @@ done
 %exclude %{usr_lib_spark}/bin/sparkR
 %exclude %{bin_dir}/sparkR
 
-%files -n spark-python
+%files -n %{spark_pkg_name}-core
+%defattr(-,root,root,755)
+%{usr_lib_spark}/README.md
+
+%files -n %{spark_pkg_name}-python
 %defattr(-,root,root,755)
 %attr(0755,root,root) %{bin_dir}/pyspark
 %attr(0755,root,root) %{usr_lib_spark}/bin/pyspark
 %{usr_lib_spark}/python
 
-%files -n spark-datanucleus
+%files -n %{spark_pkg_name}-datanucleus
 %defattr(-,root,root,755)
 %{usr_lib_spark}/jars/datanucleus-*.jar
 %{usr_lib_spark}/yarn/lib/datanucleus-*.jar
 
-%files -n spark-external
+%files -n %{spark_pkg_name}-external
 %defattr(-,root,root,755)
 %{usr_lib_spark}/external
 
-%files -n spark-yarn-shuffle
+%files -n %{spark_pkg_name}-yarn-shuffle
 %defattr(-,root,root,755)
 %{usr_lib_spark}/yarn/spark-*-yarn-shuffle.jar
 %{usr_lib_spark}/yarn/lib/spark-yarn-shuffle.jar
 
-%files -n spark-sparkr
+%files -n %{spark_pkg_name}-sparkr
 %defattr(-,root,root,755)
 %{usr_lib_spark}/R
 %{usr_lib_spark}/bin/sparkR
 %{bin_dir}/sparkR
 
 %define service_macro() \
-%files -n %1 \
-%attr(0755,root,root)/%{initd_dir}/%1 \
-%post -n %1 \
-chkconfig --add %1 \
+%files -n %{spark_pkg_name}-%1 \
+%attr(0755,root,root)/%{initd_dir}/%{spark_name}-%1 \
+%post -n %{spark_pkg_name}-%1 \
+chkconfig --add %{spark_name}-%1 \
 \
-%preun -n %1 \
+%preun -n %{spark_pkg_name}-%1 \
 if [ $1 = 0 ] ; then \
-        service %1 stop > /dev/null 2>&1 \
-        chkconfig --del %1 \
+        service %{spark_name}-%1 stop > /dev/null 2>&1 \
+        chkconfig --del %{spark_name}-%1 \
 fi \
-%postun -n %1 \
+%postun -n %{spark_pkg_name}-%1 \
 if [ $1 -ge 1 ]; then \
-        service %1 condrestart >/dev/null 2>&1 \
+        service %{spark_name}-%1 condrestart >/dev/null 2>&1 \
 fi
-%service_macro spark-master
-%service_macro spark-worker
-%service_macro spark-history-server
-%service_macro spark-thriftserver
+%service_macro master
+%service_macro worker
+%service_macro history-server
+%service_macro thriftserver
diff --git a/bigtop-packages/src/rpm/tez/SPECS/tez.spec b/bigtop-packages/src/rpm/tez/SPECS/tez.spec
index 0a87c0d5..c3f01650 100644
--- a/bigtop-packages/src/rpm/tez/SPECS/tez.spec
+++ b/bigtop-packages/src/rpm/tez/SPECS/tez.spec
@@ -13,8 +13,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-%define usr_lib_tez %{parent_dir}/usr/lib/%{name}
-%define etc_tez %{parent_dir}/etc/%{name}
+%define tez_name tez
+%define tez_pkg_name tez%{pkg_name_suffix}
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
+
+%define usr_lib_tez %{parent_dir}/usr/lib/%{tez_name}
+%define etc_tez %{parent_dir}/etc/%{tez_name}
 
 %define usr_lib_hadoop %{parent_dir}/usr/lib/hadoop
 
@@ -23,9 +27,9 @@
 %define doc_dir %{parent_dir}/%{_docdir}
 
 # No prefix directory
-%define np_var_log_tez /var/log/%{name}
-%define np_var_run_tez /var/run/%{name}
-%define np_etc_tez /etc/%{name}
+%define np_var_log_tez /var/log/%{tez_name}
+%define np_var_run_tez /var/run/%{tez_name}
+%define np_etc_tez /etc/%{tez_name}
 
 %if %{!?suse_version:1}0 && %{!?mgaversion:1}0
 
@@ -48,7 +52,7 @@
 %define suse_check \# Define an empty suse_check for compatibility with older sles
 %endif
 
-%define doc_tez %{doc_dir}/%{name}
+%define doc_tez %{doc_dir}/%{tez_name}
 %define alternatives_cmd update-alternatives
 %define __os_install_post \
     %{suse_check} ; \
@@ -57,12 +61,12 @@
 
 %else
 
-%define doc_tez %{doc_dir}/%{name}-%{tez_version}
+%define doc_tez %{doc_dir}/%{tez_name}-%{tez_version}
 %define alternatives_cmd alternatives
 
 %endif
 
-Name: tez
+Name: %{tez_pkg_name}
 Version: %{tez_version}
 Release: %{tez_release}
 Summary:Apache Tez is the Hadoop enhanced Map/Reduce module.
@@ -70,7 +74,7 @@ URL: http://tez.apache.org
 Group: Development/Libraries
 Buildroot: %{_topdir}/INSTALL/%{name}-%{version}
 License: Apache License v2.0
-Source0: apache-%{name}-%{tez_base_version}-src.tar.gz
+Source0: apache-%{tez_name}-%{tez_base_version}-src.tar.gz
 Source1: do-component-build
 Source2: install_tez.sh
 Source3: tez.1
@@ -79,7 +83,7 @@ Source5: bigtop.bom
 Source6: init.d.tmpl
 #BIGTOP_PATCH_FILES
 BuildArch: noarch
-Requires: hadoop hadoop-hdfs hadoop-yarn hadoop-mapreduce
+Requires: %{hadoop_pkg_name} %{hadoop_pkg_name}-hdfs %{hadoop_pkg_name}-yarn %{hadoop_pkg_name}-mapreduce
 
 %if  0%{?mgaversion}
 Requires: bsh-utils
@@ -94,7 +98,7 @@ which allows for a complex directed-acyclic-graph of tasks for
 processing data. It is currently built atop Apache Hadoop YARN
 
 %prep
-%setup -q -n apache-%{name}-%{tez_base_version}-src
+%setup -q -n apache-%{tez_name}-%{tez_base_version}-src
 
 #BIGTOP_PATCH_COMMANDS
 
@@ -124,11 +128,11 @@ sh %{SOURCE2} \
 
 # Manage configuration symlink
 %post
-%{alternatives_cmd} --install %{np_etc_tez}/conf %{name}-conf %{etc_tez}/conf.dist 30
+%{alternatives_cmd} --install %{np_etc_tez}/conf %{tez_name}-conf %{etc_tez}/conf.dist 30
 
 %preun
 if [ "$1" = 0 ]; then
-        %{alternatives_cmd} --remove %{name}-conf %{etc_tez}/conf.dist || :
+        %{alternatives_cmd} --remove %{tez_name}-conf %{etc_tez}/conf.dist || :
 fi
 
 #######################
diff --git a/bigtop-packages/src/rpm/zeppelin/SPECS/zeppelin.spec b/bigtop-packages/src/rpm/zeppelin/SPECS/zeppelin.spec
index c4746022..50f5148a 100644
--- a/bigtop-packages/src/rpm/zeppelin/SPECS/zeppelin.spec
+++ b/bigtop-packages/src/rpm/zeppelin/SPECS/zeppelin.spec
@@ -13,26 +13,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+%define zeppelin_name zeppelin
+%define zeppelin_pkg_name zeppelin%{pkg_name_suffix}
+%define hadoop_pkg_name hadoop%{pkg_name_suffix}
+%define spark_pkg_name spark%{pkg_name_suffix}
+
 %define etc_default %{parent_dir}/etc/default
 
-%define usr_lib_zeppelin %{parent_dir}/usr/lib/%{name}
-%define var_lib_zeppelin %{parent_dir}/var/lib/%{name}
-%define etc_zeppelin_conf_dist %{parent_dir}/etc/%{name}/conf.dist
+%define usr_lib_zeppelin %{parent_dir}/usr/lib/%{zeppelin_name}
+%define var_lib_zeppelin %{parent_dir}/var/lib/%{zeppelin_name}
+%define etc_zeppelin_conf_dist %{parent_dir}/etc/%{zeppelin_name}/conf.dist
 
 %define man_dir %{parent_dir}/%{_mandir}
 %define doc_dir %{parent_dir}/%{_docdir}
 %define lib_dir %{parent_dir}/%{_libdir}
 
 # No prefix directory
-%define np_var_log_zeppelin /var/log/%{name}
-%define np_var_run_zeppelin /var/run/%{name}
-%define np_etc_zeppelin /etc/%{name}
+%define np_var_log_zeppelin /var/log/%{zeppelin_name}
+%define np_var_run_zeppelin /var/run/%{zeppelin_name}
+%define np_etc_zeppelin /etc/%{zeppelin_name}
 
 %if  %{?suse_version:1}0
-%define doc_zeppelin %{doc_dir}/%{name}
+%define doc_zeppelin %{doc_dir}/%{zeppelin_name}
 %define alternatives_cmd update-alternatives
 %else
-%define doc_zeppelin %{doc_dir}/%{name}-%{zeppelin_version}
+%define doc_zeppelin %{doc_dir}/%{zeppelin_name}-%{zeppelin_version}
 %define alternatives_cmd alternatives
 %endif
 
@@ -40,7 +45,7 @@
 %define __os_install_post %{nil}
 %define __jar_repack ${nil}
 
-Name: zeppelin
+Name: %{zeppelin_pkg_name}
 Version: %{zeppelin_version}
 Release: %{zeppelin_release}
 Summary: Web-based notebook for Apache Spark
@@ -48,7 +53,7 @@ URL: http://zeppelin.apache.org/
 Group: Applications/Engineering
 Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 License: ASL 2.0
-Source0: %{name}-%{zeppelin_base_version}.tar.gz
+Source0: %{zeppelin_name}-%{zeppelin_base_version}.tar.gz
 Source1: bigtop.bom
 Source2: do-component-build
 Source3: init.d.tmpl
@@ -56,7 +61,7 @@ Source4: install_zeppelin.sh
 Source5: zeppelin-env.sh
 Source6: zeppelin.svc
 #BIGTOP_PATCH_FILES
-Requires: bigtop-utils >= 0.7, hadoop-client, spark-core >= 1.5, spark-python >= 1.5
+Requires: bigtop-utils >= 0.7, %{hadoop_pkg_name}-client, %{spark_pkg_name}-core >= 1.5, %{spark_pkg_name}-python >= 1.5
 Requires(preun): /sbin/service
 AutoReq: no
 
@@ -81,7 +86,7 @@ Zeppelin is a web-based notebook that enables interactive data analytics with Ap
 You can make beautiful data-driven, interactive and collaborative documents with SQL, Scala and more.
 
 %prep
-%setup -n %{name}-%{zeppelin_base_version}
+%setup -n %{zeppelin_name}-%{zeppelin_base_version}
 
 #BIGTOP_PATCH_COMMANDS
 
@@ -105,27 +110,27 @@ bash $RPM_SOURCE_DIR/install_zeppelin.sh \
   --conf-dist-dir=%{etc_zeppelin_conf_dist}
 
 # Install init script
-initd_script=$RPM_BUILD_ROOT/%{initd_dir}/%{name}
-bash %{SOURCE3} $RPM_SOURCE_DIR/%{name}.svc rpm $initd_script
+initd_script=$RPM_BUILD_ROOT/%{initd_dir}/%{zeppelin_name}
+bash %{SOURCE3} $RPM_SOURCE_DIR/%{zeppelin_name}.svc rpm $initd_script
 
 %pre
 getent group zeppelin >/dev/null || groupadd -r zeppelin
 getent passwd zeppelin >/dev/null || useradd -c "Zeppelin" -s /sbin/nologin -g zeppelin -r -d %{var_lib_zeppelin} zeppelin 2> /dev/null || :
 
 %post
-%{alternatives_cmd} --install %{np_etc_zeppelin}/conf %{name}-conf %{etc_zeppelin_conf_dist} 30
-chkconfig --add %{name}
+%{alternatives_cmd} --install %{np_etc_zeppelin}/conf %{zeppelin_name}-conf %{etc_zeppelin_conf_dist} 30
+chkconfig --add %{zeppelin_name}
 
 %preun
 if [ "$1" = 0 ]; then
-  %{alternatives_cmd} --remove %{name}-conf %{etc_zeppelin_conf_dist} || :
+  %{alternatives_cmd} --remove %{zeppelin_name}-conf %{etc_zeppelin_conf_dist} || :
 fi
 
-/sbin/service %{name} status > /dev/null 2>&1
+/sbin/service %{zeppelin_name} status > /dev/null 2>&1
 if [ $? -eq 0 ]; then
-  service %{name} stop > /dev/null 2>&1
+  service %{zeppelin_name} stop > /dev/null 2>&1
 fi
-chkconfig --del %{name}
+chkconfig --del %{zeppelin_name}
 
 #######################
 #### FILES SECTION ####
@@ -144,4 +149,4 @@ chkconfig --del %{name}
 %attr(0755,zeppelin,zeppelin) %{var_lib_zeppelin}
 %attr(0755,zeppelin,zeppelin) %{np_var_run_zeppelin}
 %attr(0755,zeppelin,zeppelin) %{np_var_log_zeppelin}
-%attr(0755,root,root)/%{initd_dir}/%{name}
+%attr(0755,root,root)/%{initd_dir}/%{zeppelin_name}
diff --git a/bigtop-packages/src/rpm/zookeeper/SPECS/zookeeper.spec b/bigtop-packages/src/rpm/zookeeper/SPECS/zookeeper.spec
index 90a82a76..2865a2d8 100644
--- a/bigtop-packages/src/rpm/zookeeper/SPECS/zookeeper.spec
+++ b/bigtop-packages/src/rpm/zookeeper/SPECS/zookeeper.spec
@@ -14,9 +14,11 @@
 # limitations under the License.
 
 %define etc_default %{parent_dir}/etc/default
+%define zookeeper_name zookeeper
+%define zookeeper_pkg_name zookeeper%{pkg_name_suffix}
 
-%define usr_lib_zookeeper %{parent_dir}/usr/lib/%{name}
-%define var_lib_zookeeper %{parent_dir}/var/lib/%{name}
+%define usr_lib_zookeeper %{parent_dir}/usr/lib/%{zookeeper_name}
+%define var_lib_zookeeper %{parent_dir}/var/lib/%{zookeeper_name}
 %define etc_zookeeper_conf_dist %{parent_dir}/etc/zookeeper/conf.dist
 
 %define bin_dir %{parent_dir}/%{_bindir}
@@ -26,12 +28,12 @@
 %define lib_dir %{parent_dir}/%{_libdir}
 
 # No prefix directory
-%define np_var_log_zookeeper /var/log/%{name}
-%define np_var_run_zookeeper /var/run/%{name}
-%define np_etc_zookeeper /etc/%{name}
+%define np_var_log_zookeeper /var/log/%{zookeeper_name}
+%define np_var_run_zookeeper /var/run/%{zookeeper_name}
+%define np_etc_zookeeper /etc/%{zookeeper_name}
 
-%define svc_zookeeper %{name}-server
-%define svc_zookeeper_rest %{name}-rest
+%define svc_zookeeper %{zookeeper_name}-server
+%define svc_zookeeper_rest %{zookeeper_name}-rest
 
 %if  %{?suse_version:1}0
 
@@ -51,7 +53,7 @@
     %{nil}
 
 
-%define doc_zookeeper %{doc_dir}/%{name}
+%define doc_zookeeper %{doc_dir}/%{zookeeper_name}
 %define alternatives_cmd update-alternatives
 %define alternatives_dep update-alternatives
 %define chkconfig_dep    aaa_base
@@ -60,7 +62,7 @@
 
 %else
 
-%define doc_zookeeper %{doc_dir}/%{name}-%{zookeeper_version}
+%define doc_zookeeper %{doc_dir}/%{zookeeper_name}-%{zookeeper_version}
 %define alternatives_cmd alternatives
 %define alternatives_dep chkconfig 
 %define chkconfig_dep    chkconfig
@@ -71,7 +73,7 @@
 
 
 
-Name: zookeeper
+Name: %{zookeeper_pkg_name}
 Version: %{zookeeper_version}
 Release: %{zookeeper_release}
 Summary: A high-performance coordination service for distributed applications.
@@ -79,7 +81,7 @@ URL: http://zookeeper.apache.org/
 Group: Development/Libraries
 Buildroot: %{_topdir}/INSTALL/%{name}-%{version}
 License: ASL 2.0
-Source0: apache-%{name}-%{zookeeper_base_version}.tar.gz
+Source0: apache-%{zookeeper_name}-%{zookeeper_base_version}.tar.gz
 Source1: do-component-build
 Source2: install_zookeeper.sh
 Source3: zookeeper-server.sh
@@ -154,7 +156,7 @@ Provides native libraries and development headers for C / C++ ZooKeeper clients.
 This package starts the zookeeper REST server on startup
 
 %prep
-%setup -n apache-%{name}-%{zookeeper_base_version}
+%setup -n apache-%{zookeeper_name}-%{zookeeper_base_version}
 
 #BIGTOP_PATCH_COMMANDS
 
@@ -200,12 +202,12 @@ getent passwd zookeeper > /dev/null || useradd -c "ZooKeeper" -s /sbin/nologin -
 
 # Manage configuration symlink
 %post
-%{alternatives_cmd} --install %{np_etc_zookeeper}/conf %{name}-conf %{etc_zookeeper_conf_dist} 30
+%{alternatives_cmd} --install %{np_etc_zookeeper}/conf %{zookeeper_name}-conf %{etc_zookeeper_conf_dist} 30
 %__install -d -o zookeeper -g zookeeper -m 0755 %{var_lib_zookeeper}
 
 %preun
 if [ "$1" = 0 ]; then
-        %{alternatives_cmd} --remove %{name}-conf %{etc_zookeeper_conf_dist} || :
+        %{alternatives_cmd} --remove %{zookeeper_name}-conf %{etc_zookeeper_conf_dist} || :
 fi
 
 %post server
@@ -242,7 +244,7 @@ fi
 %files
 %defattr(-,root,root)
 %config(noreplace) %{etc_zookeeper_conf_dist}
-%config(noreplace) %{etc_default}/%{name}
+%config(noreplace) %{etc_default}/%{zookeeper_name}
 %{np_etc_zookeeper}
 %{usr_lib_zookeeper}
 %{bin_dir}/zookeeper-server
diff --git a/bigtop.bom b/bigtop.bom
index 3101c025..b7c81e59 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -90,7 +90,8 @@
 
 bigtop {
 /** Base Configuration of the mirror and archives */
-  version = "3.2.0-SNAPSHOT"
+  base_version = "3.2.0"
+  version = base_version + "-SNAPSHOT"
   stack {
     'jdk' { version = "1." + ( System.getenv('BIGTOP_JDK') ?: "8" ); version_base = version }
     'scala' { version = '2.12.13'; version_base = version }
@@ -127,6 +128,7 @@ bigtop {
     'zookeeper' {
       name    = 'zookeeper'
       pkg     = name
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       version {
         base  = '3.5.9'
         pkg   = base
@@ -144,6 +146,7 @@ bigtop {
     }
     'hadoop' {
       name    = 'hadoop'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Hadoop'
       version { base = '3.3.4'; pkg = base; release = 1 }
       tarball { destination = "${name}-${version.base}.tar.gz"
@@ -154,6 +157,7 @@ bigtop {
     }
     'hbase' {
       name    = 'hbase'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache HBase'
       version { base = '2.4.13'; pkg = base; release = 1 }
       tarball { destination = "${name}-${version.base}.tar.gz"
@@ -165,6 +169,7 @@ bigtop {
 
     'hive' {
       name    = 'hive'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Hive'
       version { base = '3.1.3'; pkg = base; release = 1 }
       tarball { destination = "apache-${name}-${version.base}-src.tar.gz"
@@ -175,6 +180,7 @@ bigtop {
     }
     'tez' {
       name    = 'tez'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache TEZ'
       version { base = '0.10.1'; pkg = base; release = 1 }
       tarball { destination = "apache-${name}-${version.base}-src.tar.gz"
@@ -195,6 +201,7 @@ bigtop {
     }
     'solr' {
       name    = 'solr'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Solr'
       version { base = '8.11.2'; pkg = base; release = 1 }
       tarball { destination = "$name-${version.base}-src.tgz"
@@ -205,7 +212,8 @@ bigtop {
     }
     'spark' {
       name    = 'spark'
-      pkg     = 'spark-core'
+      pkg     = 'spark'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Spark'
       /*
        * Remember to fetch the new LICENSE-binary and licenses-binary files
@@ -222,6 +230,7 @@ bigtop {
 
     'flink' {
       name    = 'flink'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Flink'
       version { base = '1.15.3'; pkg = base; release = 1 }
       tarball { destination = "$name-${version.base}.tar.gz"
@@ -286,6 +295,7 @@ bigtop {
     }
     'kafka' {
       name    = 'kafka'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Kafka'
       version { base = '2.8.1'; pkg = base; release = 2 }
       tarball { destination = "$name-${version.base}.tar.gz"
@@ -305,6 +315,7 @@ bigtop {
     }
     'zeppelin' {
       name    = 'zeppelin'
+      rpm_pkg_suffix = "_" + bigtop.base_version.replace(".", "_")
       relNotes = 'Apache Zeppelin'
       version { base = '0.10.1'; pkg = base; release = 1 }
       tarball { source      = "$name-${version.base}.tgz"
diff --git a/packages.gradle b/packages.gradle
index b3f4b190..95012af9 100644
--- a/packages.gradle
+++ b/packages.gradle
@@ -472,6 +472,16 @@ def genTasks = { target ->
       println "\tNothing to do. Exiting..."
       return
     }
+    def final BIGTOP_BASE_VERSION = "${config.bigtop.base_version}"
+
+    def final PKG_NAME_SUFFIX = config.bigtop.components[target].rpm_pkg_suffix
+    def RPM_PKG_NAME_SUFFIX = PKG_NAME_SUFFIX
+    def GRADLE_PKG_NAME_SUFFIX = PKG_NAME_SUFFIX
+    if (!project.hasProperty("pkgSuffix") || !PKG_NAME_SUFFIX) {
+      RPM_PKG_NAME_SUFFIX = "%{nil}"
+      GRADLE_PKG_NAME_SUFFIX = ""
+    }
+
     def final BIGTOP_BUILD_STAMP = System.getenv('BIGTOP_BUILD_STAMP') ?:
             config.bigtop.components[target].version.release
     def final PKG_BUILD_DIR = config.bigtop.components[target].builddir
@@ -482,7 +492,7 @@ def genTasks = { target ->
     def final BASE_VERSION = config.bigtop.components[target].version.base
     def final HADOOP_VERSION = config.bigtop.components["hadoop"].version.pkg
     def RELEASE_DIST = "rpmbuild --eval '%{?dist}' 2>/dev/null".execute().text.trim().replaceAll("'",'')
-    def SRCRPM="$PKG_OUTPUT_DIR/$PKG_NAME-${PKG_VERSION}-$BIGTOP_BUILD_STAMP${RELEASE_DIST}.src.rpm"
+    def SRCRPM = "$PKG_OUTPUT_DIR/${PKG_NAME}${GRADLE_PKG_NAME_SUFFIX}-${PKG_VERSION}-$BIGTOP_BUILD_STAMP${RELEASE_DIST}.src.rpm"
 
     def final DO_MAVEN_DEPLOY = project.findProperty('doMavenDeploy') == 'true'
     def final MAVEN_DEPLOY_SOURCE = project.findProperty('mavenDeploySource') == 'true'
@@ -490,11 +500,7 @@ def genTasks = { target ->
     def final MAVEN_REPO_URI = project.hasProperty('mavenRepoUri') ? project.property('mavenRepoUri') : null
 
     def final PARENT_DIR = project.hasProperty("parentDir") ? project.property('parentDir') : "%{nil}"
-    def final mpackVersionRegex = /^(?<version>[0-9]+\.[0-9]+\.[0-9]+)-.*/
     def FULL_PARENT_DIR = "${PARENT_DIR}"
-    def matcher = "${config.bigtop.version}" =~ mpackVersionRegex
-    matcher.matches()
-    def BIGTOP_BASE_VERSION = matcher.group("version")
     if (PARENT_DIR != "%{nil}") {
       FULL_PARENT_DIR = "${PARENT_DIR}/${BIGTOP_BASE_VERSION}"
     }
@@ -509,7 +515,8 @@ def genTasks = { target ->
         '--define', "do_maven_deploy ${DO_MAVEN_DEPLOY}",
         '--define', "maven_deploy_source ${MAVEN_DEPLOY_SOURCE}",
         "--define", "parent_dir ${FULL_PARENT_DIR}",
-        "--define", "bigtop_base_version ${BIGTOP_BASE_VERSION}"
+        "--define", "bigtop_base_version ${BIGTOP_BASE_VERSION}",
+        "--define", "pkg_name_suffix ${RPM_PKG_NAME_SUFFIX}",
     ]
 
    if (MAVEN_REPO_URI != null) {
@@ -547,6 +554,15 @@ def genTasks = { target ->
       println "\tNothing to do. Exiting..."
       return
     }
+    def final BIGTOP_BASE_VERSION = "${config.bigtop.base_version}"
+
+    def final PKG_NAME_SUFFIX = config.bigtop.components[target].rpm_pkg_suffix
+    def RPM_PKG_NAME_SUFFIX = PKG_NAME_SUFFIX
+    def GRADLE_PKG_NAME_SUFFIX = PKG_NAME_SUFFIX
+    if (!project.hasProperty("pkgSuffix") || !PKG_NAME_SUFFIX) {
+      RPM_PKG_NAME_SUFFIX = "%{nil}"
+      GRADLE_PKG_NAME_SUFFIX = ""
+    }
     def final BIGTOP_BUILD_STAMP = System.getenv('BIGTOP_BUILD_STAMP') ?:
             config.bigtop.components[target].version.release
     def final NAME = config.bigtop.components[target].name
@@ -631,6 +647,7 @@ def genTasks = { target ->
         '--define', "${PKG_NAME_FOR_PKG}_release ${BIGTOP_BUILD_STAMP}%{?dist}",
         '-bs', '--nodeps', "--buildroot=${PKG_BUILD_DIR}/rpm/INSTALL",
         specFileName,
+        '--define', "pkg_name_suffix ${RPM_PKG_NAME_SUFFIX}",
     ]
     exec {
       workingDir BASE_DIR
@@ -640,7 +657,7 @@ def genTasks = { target ->
     mkdir(PKG_OUTPUT_DIR)
     def RELEASE_DIST = "rpmbuild --eval '%{?dist}' 2>/dev/null".execute().text.trim().replaceAll("'",'')
     copy {
-      from "$PKG_BUILD_DIR/rpm/SRPMS/${PKG_NAME}-${PKG_VERSION}-${BIGTOP_BUILD_STAMP}${RELEASE_DIST}.src.rpm"
+      from "$PKG_BUILD_DIR/rpm/SRPMS/${PKG_NAME}${GRADLE_PKG_NAME_SUFFIX}-${PKG_VERSION}-${BIGTOP_BUILD_STAMP}${RELEASE_DIST}.src.rpm"
       into PKG_OUTPUT_DIR
     }
     touchTargetFile(config.bigtop.components[target].targetsrpm)