You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@bigtop.apache.org by iw...@apache.org on 2020/12/18 13:12:25 UTC
[bigtop] branch master updated: BIGTOP-3280. Bump Hadoop to 3.2.1.
(#704)
This is an automated email from the ASF dual-hosted git repository.
iwasakims pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git
The following commit(s) were added to refs/heads/master by this push:
new 26a92e4 BIGTOP-3280. Bump Hadoop to 3.2.1. (#704)
26a92e4 is described below
commit 26a92e4340fee6ab2f6aa6a5925faf33d41ec181
Author: Masatake Iwasaki <iw...@apache.org>
AuthorDate: Fri Dec 18 22:12:14 2020 +0900
BIGTOP-3280. Bump Hadoop to 3.2.1. (#704)
---
.../src/common/hadoop/do-component-build | 11 +-
.../src/common/hadoop/install_hadoop.sh | 72 +-
.../hadoop/patch0-HADOOP-16739-branch-3.2.diff | 92 +++
.../common/hadoop/patch0-revert-HADOOP-16598.diff | 796 ---------------------
.../src/common/hadoop/patch1-HADOOP-15939.diff | 13 +
bigtop-packages/src/deb/hadoop/control | 4 +-
bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs | 1 -
.../src/deb/hadoop/hadoop-httpfs.install | 2 -
.../src/deb/hadoop/hadoop-httpfs.postinst | 3 -
bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm | 2 -
bigtop-packages/src/deb/hadoop/hadoop-kms.dirs | 1 -
bigtop-packages/src/deb/hadoop/hadoop-kms.install | 2 -
bigtop-packages/src/deb/hadoop/hadoop-kms.postinst | 3 -
bigtop-packages/src/deb/hadoop/hadoop-kms.prerm | 2 -
.../src/deb/hadoop/hadoop-mapreduce.install | 1 -
bigtop-packages/src/deb/hadoop/hadoop.install | 5 +-
bigtop-packages/src/deb/hadoop/rules | 2 -
bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec | 39 +-
bigtop.bom | 2 +-
bigtop_toolchain/manifests/protobuf.pp | 2 +-
20 files changed, 128 insertions(+), 927 deletions(-)
diff --git a/bigtop-packages/src/common/hadoop/do-component-build b/bigtop-packages/src/common/hadoop/do-component-build
index adc02cc..da0ba13 100644
--- a/bigtop-packages/src/common/hadoop/do-component-build
+++ b/bigtop-packages/src/common/hadoop/do-component-build
@@ -116,13 +116,6 @@ fi
mkdir build
mkdir build/src
-# For build environments that don't support downloads from Internet, support a local source.
-if [ ! -z ${LOCAL_DOWNLOADS_DIR-} ] && [ -f $LOCAL_DOWNLOADS_DIR/tomcat/apache-tomcat-$BIGTOP_TOMCAT_VERSION.tar.gz ]; then
- BIGTOP_TOMCAT_DOWNLOAD_URL=file://$LOCAL_DOWNLOADS_DIR/tomcat/apache-tomcat-$BIGTOP_TOMCAT_VERSION.tar.gz
-else
- BIGTOP_TOMCAT_DOWNLOAD_URL=http://archive.apache.org/dist/tomcat/tomcat-8/v${BIGTOP_TOMCAT_VERSION}/bin/apache-tomcat-${BIGTOP_TOMCAT_VERSION}.tar.gz
-fi
-
# Build artifacts
MAVEN_OPTS="-Dzookeeper.version=$ZOOKEEPER_VERSION"
MAVEN_OPTS="$MAVEN_OPTS -DskipTests -DskipTest -DskipITs"
@@ -131,7 +124,7 @@ MAVEN_OPTS="$MAVEN_OPTS -DskipTests -DskipTest -DskipITs"
. $(dirname ${0})/maven_deploy.sh
# Build artifacts
-mvn $ANT_OPTS $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Dtar ${MAVEN_OPTS} -Dtomcat.version=${BIGTOP_TOMCAT_VERSION} -Dtomcat.download.url="$BIGTOP_TOMCAT_DOWNLOAD_URL" install package ${EXTRA_GOALS} "$@"
+mvn $ANT_OPTS $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Dtar ${MAVEN_OPTS} install package ${EXTRA_GOALS} "$@"
mvn site site:stage ${MAVEN_OPTS} $@
(cd build ; tar --strip-components=1 -xzvf ../hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz)
@@ -142,7 +135,7 @@ mkdir -p build/share
cp -r target/staging/hadoop-project build/share/doc
# Create a manifest for hadoop client package
-(cd hadoop-client/target/hadoop-client-*/share/hadoop/client/lib ; ls) > build/hadoop-client.list
+(cd hadoop-client-modules/hadoop-client/target/hadoop-client-*/share/hadoop/client/lib ; ls) > build/hadoop-client.list
# Copy fuse output to the build directory
cp hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs build/bin
diff --git a/bigtop-packages/src/common/hadoop/install_hadoop.sh b/bigtop-packages/src/common/hadoop/install_hadoop.sh
index 3c82d1d..91730dd 100755
--- a/bigtop-packages/src/common/hadoop/install_hadoop.sh
+++ b/bigtop-packages/src/common/hadoop/install_hadoop.sh
@@ -50,8 +50,6 @@ OPTS=$(getopt \
-l 'system-lib-dir:' \
-l 'system-libexec-dir:' \
-l 'hadoop-etc-dir:' \
- -l 'httpfs-etc-dir:' \
- -l 'kms-etc-dir:' \
-l 'doc-dir:' \
-l 'man-dir:' \
-l 'example-dir:' \
@@ -113,12 +111,6 @@ while true ; do
--hadoop-etc-dir)
HADOOP_ETC_DIR=$2 ; shift 2
;;
- --httpfs-etc-dir)
- HTTPFS_ETC_DIR=$2 ; shift 2
- ;;
- --kms-etc-dir)
- KMS_ETC_DIR=$2 ; shift 2
- ;;
--installed-lib-dir)
INSTALLED_LIB_DIR=$2 ; shift 2
;;
@@ -161,8 +153,6 @@ SYSTEM_INCLUDE_DIR=${SYSTEM_INCLUDE_DIR:-$PREFIX/usr/include}
SYSTEM_LIBEXEC_DIR=${SYSTEM_LIBEXEC_DIR:-$PREFIX/usr/libexec}
EXAMPLE_DIR=${EXAMPLE_DIR:-$DOC_DIR/examples}
HADOOP_ETC_DIR=${HADOOP_ETC_DIR:-$PREFIX/etc/hadoop}
-HTTPFS_ETC_DIR=${HTTPFS_ETC_DIR:-$PREFIX/etc/hadoop-httpfs}
-KMS_ETC_DIR=${KMS_ETC_DIR:-$PREFIX/etc/hadoop-kms}
BASH_COMPLETION_DIR=${BASH_COMPLETION_DIR:-$PREFIX/etc/bash_completion.d}
INSTALLED_HADOOP_DIR=${INSTALLED_HADOOP_DIR:-/usr/lib/hadoop}
@@ -191,7 +181,7 @@ done
#libexec
install -d -m 0755 ${SYSTEM_LIBEXEC_DIR}
-cp ${BUILD_DIR}/libexec/* ${SYSTEM_LIBEXEC_DIR}/
+cp -r ${BUILD_DIR}/libexec/* ${SYSTEM_LIBEXEC_DIR}/
cp ${DISTRO_DIR}/hadoop-layout.sh ${SYSTEM_LIBEXEC_DIR}/
install -m 0755 ${DISTRO_DIR}/init-hdfs.sh ${SYSTEM_LIBEXEC_DIR}/
install -m 0755 ${DISTRO_DIR}/init-hcfs.json ${SYSTEM_LIBEXEC_DIR}/
@@ -202,7 +192,7 @@ rm -rf ${SYSTEM_LIBEXEC_DIR}/*.cmd
install -d -m 0755 ${HADOOP_DIR}
cp ${BUILD_DIR}/share/hadoop/common/*.jar ${HADOOP_DIR}/
cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-auth*.jar ${HADOOP_DIR}/
-cp ${BUILD_DIR}/share/hadoop/mapreduce/lib/hadoop-annotations*.jar ${HADOOP_DIR}/
+cp ${BUILD_DIR}/share/hadoop/common/lib/hadoop-annotations*.jar ${HADOOP_DIR}/
install -d -m 0755 ${MAPREDUCE_DIR}
cp ${BUILD_DIR}/share/hadoop/mapreduce/hadoop-mapreduce*.jar ${MAPREDUCE_DIR}
cp ${BUILD_DIR}/share/hadoop/tools/lib/*.jar ${MAPREDUCE_DIR}
@@ -232,7 +222,7 @@ cp -ra ${BUILD_DIR}/share/hadoop/hdfs/webapps ${HDFS_DIR}/
# bin
install -d -m 0755 ${HADOOP_DIR}/bin
-cp -a ${BUILD_DIR}/bin/{hadoop,rcc,fuse_dfs} ${HADOOP_DIR}/bin
+cp -a ${BUILD_DIR}/bin/{hadoop,fuse_dfs} ${HADOOP_DIR}/bin
install -d -m 0755 ${HDFS_DIR}/bin
cp -a ${BUILD_DIR}/bin/hdfs ${HDFS_DIR}/bin
install -d -m 0755 ${YARN_DIR}/bin
@@ -244,7 +234,7 @@ cp -a ${BUILD_DIR}/bin/mapred ${YARN_DIR}/bin
# sbin
install -d -m 0755 ${HADOOP_DIR}/sbin
-cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,slaves}.sh ${HADOOP_DIR}/sbin
+cp -a ${BUILD_DIR}/sbin/{hadoop-daemon,hadoop-daemons,workers}.sh ${HADOOP_DIR}/sbin
install -d -m 0755 ${HDFS_DIR}/sbin
cp -a ${BUILD_DIR}/sbin/{distribute-exclude,refresh-namenodes}.sh ${HDFS_DIR}/sbin
install -d -m 0755 ${YARN_DIR}/sbin
@@ -265,7 +255,7 @@ install -d -m 0755 ${SYSTEM_INCLUDE_DIR}
cp ${BUILD_DIR}/include/hdfs.h ${SYSTEM_INCLUDE_DIR}/
cp ${BUILD_DIR}/lib/native/*.a ${HADOOP_NATIVE_LIB_DIR}/
-for library in `cd ${BUILD_DIR}/lib/native ; ls libsnappy.so.1.* 2>/dev/null` libhadoop.so.1.0.0; do
+for library in `cd ${BUILD_DIR}/lib/native ; ls libsnappy.so.1.* 2>/dev/null` libhadoop.so.1.0.0 libnativetask.so.1.0.0; do
cp ${BUILD_DIR}/lib/native/${library} ${HADOOP_NATIVE_LIB_DIR}/
ldconfig -vlN ${HADOOP_NATIVE_LIB_DIR}/${library}
ln -s ${library} ${HADOOP_NATIVE_LIB_DIR}/${library/.so.*/}.so
@@ -318,7 +308,7 @@ cp ${DISTRO_DIR}/conf.empty/mapred-site.xml $HADOOP_ETC_DIR/conf.empty
# so that it can still be used as example, but doesn't affect anything
# by default
sed -i -e '/^[^#]/s,^,#,' ${BUILD_DIR}/etc/hadoop/hadoop-env.sh
-cp ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/conf.empty
+cp -r ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/conf.empty
rm -rf $HADOOP_ETC_DIR/conf.empty/*.cmd
# docs
@@ -335,68 +325,24 @@ done
# HTTPFS
install -d -m 0755 ${HTTPFS_DIR}/sbin
cp ${BUILD_DIR}/sbin/httpfs.sh ${HTTPFS_DIR}/sbin/
-cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/webapps ${HTTPFS_DIR}/webapps
install -d -m 0755 ${PREFIX}/var/lib/hadoop-httpfs
-install -d -m 0755 $HTTPFS_ETC_DIR/conf.empty
-
-install -m 0755 ${DISTRO_DIR}/httpfs-tomcat-deployment.sh ${HTTPFS_DIR}/tomcat-deployment.sh
-
-HTTPFS_HTTP_DIRECTORY=$HTTPFS_ETC_DIR/tomcat-conf.dist
-HTTPFS_HTTPS_DIRECTORY=$HTTPFS_ETC_DIR/tomcat-conf.https
-
-install -d -m 0755 ${HTTPFS_HTTP_DIRECTORY}
-cp -r ${BUILD_DIR}/share/hadoop/httpfs/tomcat/conf ${HTTPFS_HTTP_DIRECTORY}
-chmod 644 ${HTTPFS_HTTP_DIRECTORY}/conf/*
-install -d -m 0755 ${HTTPFS_HTTP_DIRECTORY}/WEB-INF
-mv ${HTTPFS_DIR}/webapps/webhdfs/WEB-INF/*.xml ${HTTPFS_HTTP_DIRECTORY}/WEB-INF/
-
-cp -r ${HTTPFS_HTTP_DIRECTORY} ${HTTPFS_HTTPS_DIRECTORY}
-mv ${HTTPFS_HTTPS_DIRECTORY}/conf/ssl-server.xml ${HTTPFS_HTTPS_DIRECTORY}/conf/server.xml
-rm ${HTTPFS_HTTP_DIRECTORY}/conf/ssl-server.xml
-
-mv $HADOOP_ETC_DIR/conf.empty/httpfs* $HTTPFS_ETC_DIR/conf.empty
-sed -i -e '/<\/configuration>/i\
- <property>\
- <name>httpfs.hadoop.config.dir</name>\
- <value>/etc/hadoop/conf</value>\
- </property>' $HTTPFS_ETC_DIR/conf.empty/httpfs-site.xml
# KMS
install -d -m 0755 ${KMS_DIR}/sbin
cp ${BUILD_DIR}/sbin/kms.sh ${KMS_DIR}/sbin/
-cp -r ${BUILD_DIR}/share/hadoop/kms/tomcat/webapps ${KMS_DIR}/webapps
install -d -m 0755 ${PREFIX}/var/lib/hadoop-kms
-install -d -m 0755 $KMS_ETC_DIR/conf.empty
-
-install -m 0755 ${DISTRO_DIR}/kms-tomcat-deployment.sh ${KMS_DIR}/tomcat-deployment.sh
-
-KMS_HTTP_DIRECTORY=$KMS_ETC_DIR/tomcat-conf.dist
-KMS_HTTPS_DIRECTORY=$KMS_ETC_DIR/tomcat-conf.https
-
-install -d -m 0755 ${KMS_HTTP_DIRECTORY}
-cp -r ${BUILD_DIR}/share/hadoop/kms/tomcat/conf ${KMS_HTTP_DIRECTORY}
-chmod 644 ${KMS_HTTP_DIRECTORY}/conf/*
-install -d -m 0755 ${KMS_HTTP_DIRECTORY}/WEB-INF
-cp ${KMS_DIR}/webapps/kms/WEB-INF/*.xml ${KMS_HTTP_DIRECTORY}/WEB-INF/
-
-cp -r ${KMS_HTTP_DIRECTORY} ${KMS_HTTPS_DIRECTORY}
-mv ${KMS_HTTPS_DIRECTORY}/conf/ssl-server.xml ${KMS_HTTPS_DIRECTORY}/conf/server.xml
-rm ${KMS_HTTP_DIRECTORY}/conf/ssl-server.xml
-mv $HADOOP_ETC_DIR/conf.empty/kms* $KMS_ETC_DIR/conf.empty
-cp $HADOOP_ETC_DIR/conf.empty/core-site.xml $KMS_ETC_DIR/conf.empty
-# Make the pseudo-distributed config
for conf in conf.pseudo ; do
install -d -m 0755 $HADOOP_ETC_DIR/$conf
# Install the upstream config files
- cp ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/$conf
+ cp -r ${BUILD_DIR}/etc/hadoop/* $HADOOP_ETC_DIR/$conf
# Remove the ones that shouldn't be installed
- rm -rf $HADOOP_ETC_DIR/$conf/httpfs*
rm -rf $HADOOP_ETC_DIR/$conf/*.cmd
# Overlay the -site files
(cd $DISTRO_DIR/$conf && tar -cf - .) | (cd $HADOOP_ETC_DIR/$conf && tar -xf -)
- chmod -R 0644 $HADOOP_ETC_DIR/$conf/*
+ find $HADOOP_ETC_DIR/$conf/ -type f -print -exec chmod 0644 {} \;
+ find $HADOOP_ETC_DIR/$conf/ -type d -print -exec chmod 0755 {} \;
# When building straight out of svn we have to account for pesky .svn subdirs
rm -rf `find $HADOOP_ETC_DIR/$conf -name .svn -type d`
done
diff --git a/bigtop-packages/src/common/hadoop/patch0-HADOOP-16739-branch-3.2.diff b/bigtop-packages/src/common/hadoop/patch0-HADOOP-16739-branch-3.2.diff
new file mode 100644
index 0000000..49e8166
--- /dev/null
+++ b/bigtop-packages/src/common/hadoop/patch0-HADOOP-16739-branch-3.2.diff
@@ -0,0 +1,92 @@
+diff --git a/BUILDING.txt b/BUILDING.txt
+index 9d40d24524e..cb3d68edfff 100644
+--- a/BUILDING.txt
++++ b/BUILDING.txt
+@@ -422,6 +422,47 @@ Building command example:
+ Note that the command above manually specified the openssl library and include
+ path. This is necessary at least for Homebrewed OpenSSL.
+
++
++----------------------------------------------------------------------------------
++
++Building on CentOS 8
++
++----------------------------------------------------------------------------------
++
++
++* Install development tools such as GCC, autotools, OpenJDK and Maven.
++ $ sudo dnf group install 'Development Tools'
++ $ sudo dnf install java-1.8.0-openjdk-devel maven
++
++* Install Protocol Buffers v2.5.0.
++ $ git clone https://github.com/protocolbuffers/protobuf
++ $ cd protobuf
++ $ git checkout v2.5.0
++ $ autoreconf -i
++ $ ./configure --prefix=/usr/local
++ $ make
++ $ sudo make install
++ $ cd ..
++
++* Install libraries provided by CentOS 8.
++ $ sudo dnf install libtirpc-devel zlib-devel lz4-devel bzip2-devel openssl-devel cyrus-sasl-devel libpmem-devel
++
++* Install optional dependencies (snappy-devel).
++ $ sudo dnf --enablerepo=PowerTools install snappy-devel
++
++* Install optional dependencies (libzstd-devel).
++ $ sudo dnf install https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
++ $ sudo dnf --enablerepo=epel install libzstd-devel
++
++* Install optional dependencies (isa-l).
++ $ sudo dnf --enablerepo=PowerTools install nasm
++ $ git clone https://github.com/intel/isa-l
++ $ cd isa-l/
++ $ ./autogen.sh
++ $ ./configure
++ $ make
++ $ sudo make install
++
+ ----------------------------------------------------------------------------------
+
+ Building on Windows
+diff --git a/hadoop-tools/hadoop-pipes/src/CMakeLists.txt b/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
+index ff660bfafce..ce6ee317936 100644
+--- a/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
++++ b/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
+@@ -22,6 +22,25 @@ list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/../../../hadoop-common-project
+ include(HadoopCommon)
+
+ find_package(OpenSSL REQUIRED)
++find_package(PkgConfig QUIET)
++pkg_check_modules(LIBTIRPC libtirpc)
++
++find_path(RPC_INCLUDE_DIRS NAMES rpc/rpc.h)
++
++if (NOT RPC_INCLUDE_DIRS)
++ find_path(TIRPC_INCLUDE_DIRS
++ NAMES netconfig.h
++ PATH_SUFFIXES tirpc
++ HINTS ${LIBTIRPC_INCLUDE_DIRS}
++ )
++
++ find_library(TIRPC_LIBRARIES
++ NAMES tirpc
++ HINTS ${LIBTIRPC_LIBRARY_DIRS}
++ )
++
++ include_directories(${TIRPC_INCLUDE_DIRS})
++endif()
+
+ include_directories(
+ main/native/utils/api
+@@ -51,6 +70,9 @@ add_library(hadooputils STATIC
+ main/native/utils/impl/StringUtils.cc
+ main/native/utils/impl/SerialUtils.cc
+ )
++if (NOT RPC_INCLUDE_DIRS AND LIBTIRPC_FOUND)
++ target_link_libraries(hadooputils tirpc)
++endif()
+
+ add_library(hadooppipes STATIC
+ main/native/pipes/impl/HadoopPipes.cc
diff --git a/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff b/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff
deleted file mode 100644
index 8a7cae4..0000000
--- a/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff
+++ /dev/null
@@ -1,796 +0,0 @@
-commit 49478f7a59d52cbb379f641d9e7a8ac5e08e3c0f
-Author: Masatake Iwasaki <iw...@apache.org>
-Date: Mon Nov 16 03:54:23 2020 +0000
-
- Revert "HADOOP-16598. Backport "HADOOP-16558 [COMMON+HDFS] use protobuf-maven-plugin to generate protobuf classes" to all active branches"
-
- This reverts commit e649b30b32962256aa2fb48f8c69d30a705a1220.
-
-diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
-index 1da0b49dc7a..e79e1e2d3cd 100644
---- a/hadoop-common-project/hadoop-common/pom.xml
-+++ b/hadoop-common-project/hadoop-common/pom.xml
-@@ -369,20 +369,6 @@
- </resource>
- </resources>
- <plugins>
-- <plugin>
-- <groupId>org.xolstice.maven.plugins</groupId>
-- <artifactId>protobuf-maven-plugin</artifactId>
-- <executions>
-- <execution>
-- <id>src-compile-protoc</id>
-- <configuration><skip>false</skip></configuration>
-- </execution>
-- <execution>
-- <id>src-test-compile-protoc</id>
-- <configuration><skip>false</skip></configuration>
-- </execution>
-- </executions>
-- </plugin>
- <plugin>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-maven-plugins</artifactId>
-@@ -403,6 +389,57 @@
- </source>
- </configuration>
- </execution>
-+ <execution>
-+ <id>compile-protoc</id>
-+ <goals>
-+ <goal>protoc</goal>
-+ </goals>
-+ <configuration>
-+ <protocVersion>${protobuf.version}</protocVersion>
-+ <protocCommand>${protoc.path}</protocCommand>
-+ <imports>
-+ <param>${basedir}/src/main/proto</param>
-+ </imports>
-+ <source>
-+ <directory>${basedir}/src/main/proto</directory>
-+ <includes>
-+ <include>HAServiceProtocol.proto</include>
-+ <include>IpcConnectionContext.proto</include>
-+ <include>ProtocolInfo.proto</include>
-+ <include>RpcHeader.proto</include>
-+ <include>ZKFCProtocol.proto</include>
-+ <include>ProtobufRpcEngine.proto</include>
-+ <include>Security.proto</include>
-+ <include>GetUserMappingsProtocol.proto</include>
-+ <include>TraceAdmin.proto</include>
-+ <include>RefreshAuthorizationPolicyProtocol.proto</include>
-+ <include>RefreshUserMappingsProtocol.proto</include>
-+ <include>RefreshCallQueueProtocol.proto</include>
-+ <include>GenericRefreshProtocol.proto</include>
-+ </includes>
-+ </source>
-+ </configuration>
-+ </execution>
-+ <execution>
-+ <id>compile-test-protoc</id>
-+ <goals>
-+ <goal>test-protoc</goal>
-+ </goals>
-+ <configuration>
-+ <protocVersion>${protobuf.version}</protocVersion>
-+ <protocCommand>${protoc.path}</protocCommand>
-+ <imports>
-+ <param>${basedir}/src/test/proto</param>
-+ </imports>
-+ <source>
-+ <directory>${basedir}/src/test/proto</directory>
-+ <includes>
-+ <include>test.proto</include>
-+ <include>test_rpc_service.proto</include>
-+ </includes>
-+ </source>
-+ </configuration>
-+ </execution>
- </executions>
- </plugin>
- <plugin>
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-index 6296f88da69..fe465490b19 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.proto";
- option java_outer_classname = "GenericRefreshProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-index cb91a13b048..51552b879f3 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.tools.proto";
- option java_outer_classname = "GetUserMappingsProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-index 5a88a7ff03f..16ee9a2e0a5 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ha.proto";
- option java_outer_classname = "HAServiceProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto b/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-index 16e2fb7c4db..4557e893cff 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "IpcConnectionContextProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-index fa113134027..a17e2078e94 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- /**
- * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer
- * to marshal the request and response in the RPC layer.
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto b/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-index 0e9d0d4baa4..fdbc440d91c 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "ProtocolInfoProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-index f57c6d63039..5ef1c2d0a8c 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-index 463b7c548fe..67ed1332510 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.proto";
- option java_outer_classname = "RefreshCallQueueProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-index a1130f5c2d9..41031ed9ea0 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "RefreshUserMappingsProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-index 4705b4276b8..e8d8cbbfe70 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "RpcHeaderProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-index 736850110b6..5ff571decf5 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "SecurityProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto b/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-index 8cf131bfb46..52d2a90abf4 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.tracing";
- option java_outer_classname = "TraceAdminPB";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-index 98bc05f4a36..a2b8dd10b30 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ha.proto";
- option java_outer_classname = "ZKFCProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test.proto b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-index c5425f945c0..be148d1f475 100644
---- a/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-+++ b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "TestProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-index 9fdfb59dbd6..b0121ca217a 100644
---- a/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-+++ b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-@@ -15,7 +15,6 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "TestRpcServiceProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-index 8b1b3dcdcc8..fe26603d81e 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-@@ -124,18 +124,35 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
- </configuration>
- </plugin>
- <plugin>
-- <groupId>org.xolstice.maven.plugins</groupId>
-- <artifactId>protobuf-maven-plugin</artifactId>
-+ <groupId>org.apache.hadoop</groupId>
-+ <artifactId>hadoop-maven-plugins</artifactId>
- <executions>
- <execution>
-- <id>src-compile-protoc</id>
-+ <id>compile-protoc</id>
-+ <goals>
-+ <goal>protoc</goal>
-+ </goals>
- <configuration>
-- <skip>false</skip>
-- <additionalProtoPathElements>
-- <additionalProtoPathElement>
-- ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
-- </additionalProtoPathElement>
-- </additionalProtoPathElements>
-+ <protocVersion>${protobuf.version}</protocVersion>
-+ <protocCommand>${protoc.path}</protocCommand>
-+ <imports>
-+ <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+ <param>${basedir}/src/main/proto</param>
-+ </imports>
-+ <source>
-+ <directory>${basedir}/src/main/proto</directory>
-+ <includes>
-+ <include>ClientDatanodeProtocol.proto</include>
-+ <include>ClientNamenodeProtocol.proto</include>
-+ <include>acl.proto</include>
-+ <include>xattr.proto</include>
-+ <include>datatransfer.proto</include>
-+ <include>hdfs.proto</include>
-+ <include>encryption.proto</include>
-+ <include>inotify.proto</include>
-+ <include>ReconfigurationProtocol.proto</include>
-+ </includes>
-+ </source>
- </configuration>
- </execution>
- </executions>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-index bcc6ed4f7ca..b9077f6ef01 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-index bb4d5e8c3d9..1dbf56ced3f 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "ClientNamenodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-index bad9f45cf62..12a38b110fe 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used to reconfigure NameNode
- // and DataNode by HDFS admin.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-index 32aac91df30..bb7fdb0168f 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "AclProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-index 91b58d2bd23..135bab1625f 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used to transfer data
- // to and from the datanode, as well as between datanodes.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-index 6a2e0a19769..68b2f3af29c 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-index c07dd9e8c97..04f8730c5f7 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-index dfc6618b10a..53399029582 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax="proto2";
-+
- // This file contains protocol buffers used to communicate edits to clients
- // as part of the inotify system.
-
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-index a53aa86c16d..6c8b5eb5943 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "XAttrProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-index 3944308f8b2..02339640576 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-@@ -179,21 +179,29 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
- </executions>
- </plugin>
- <plugin>
-- <groupId>org.xolstice.maven.plugins</groupId>
-- <artifactId>protobuf-maven-plugin</artifactId>
-+ <groupId>org.apache.hadoop</groupId>
-+ <artifactId>hadoop-maven-plugins</artifactId>
- <executions>
- <execution>
-- <id>src-compile-protoc</id>
-+ <id>compile-protoc</id>
-+ <goals>
-+ <goal>protoc</goal>
-+ </goals>
- <configuration>
-- <skip>false</skip>
-- <additionalProtoPathElements>
-- <additionalProtoPathElement>
-- ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
-- </additionalProtoPathElement>
-- <additionalProtoPathElement>
-- ${basedir}/../hadoop-hdfs-client/src/main/proto
-- </additionalProtoPathElement>
-- </additionalProtoPathElements>
-+ <protocVersion>${protobuf.version}</protocVersion>
-+ <protocCommand>${protoc.path}</protocCommand>
-+ <imports>
-+ <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-+ <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+ <param>${basedir}/src/main/proto</param>
-+ </imports>
-+ <source>
-+ <directory>${basedir}/src/main/proto</directory>
-+ <includes>
-+ <include>FederationProtocol.proto</include>
-+ <include>RouterProtocol.proto</include>
-+ </includes>
-+ </source>
- </configuration>
- </execution>
- </executions>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-index a7ef1a46843..2ea240c11d1 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.federation.protocol.proto";
- option java_outer_classname = "HdfsServerFederationProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-index 3eff0929775..f3a2b6e8abc 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "RouterProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-index 97c7cec5998..573e2d68dd8 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-@@ -324,21 +324,37 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
- </executions>
- </plugin>
- <plugin>
-- <groupId>org.xolstice.maven.plugins</groupId>
-- <artifactId>protobuf-maven-plugin</artifactId>
-+ <groupId>org.apache.hadoop</groupId>
-+ <artifactId>hadoop-maven-plugins</artifactId>
- <executions>
- <execution>
-- <id>src-compile-protoc</id>
-+ <id>compile-protoc</id>
-+ <goals>
-+ <goal>protoc</goal>
-+ </goals>
- <configuration>
-- <skip>false</skip>
-- <additionalProtoPathElements>
-- <additionalProtoPathElement>
-- ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
-- </additionalProtoPathElement>
-- <additionalProtoPathElement>
-- ${basedir}/../hadoop-hdfs-client/src/main/proto
-- </additionalProtoPathElement>
-- </additionalProtoPathElements>
-+ <protocVersion>${protobuf.version}</protocVersion>
-+ <protocCommand>${protoc.path}</protocCommand>
-+ <imports>
-+ <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+ <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-+ <param>${basedir}/src/main/proto</param>
-+ </imports>
-+ <source>
-+ <directory>${basedir}/src/main/proto</directory>
-+ <includes>
-+ <include>HdfsServer.proto</include>
-+ <include>DatanodeProtocol.proto</include>
-+ <include>DatanodeLifelineProtocol.proto</include>
-+ <include>HAZKInfo.proto</include>
-+ <include>InterDatanodeProtocol.proto</include>
-+ <include>JournalProtocol.proto</include>
-+ <include>NamenodeProtocol.proto</include>
-+ <include>QJournalProtocol.proto</include>
-+ <include>editlog.proto</include>
-+ <include>fsimage.proto</include>
-+ </includes>
-+ </source>
- </configuration>
- </execution>
- </executions>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/proto/bkjournal.proto b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/proto/bkjournal.proto
-index 4b7a5f261cf..fc487186c53 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/proto/bkjournal.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/src/main/proto/bkjournal.proto
-@@ -19,7 +19,6 @@
- // This file contains protocol buffers that are used by bkjournal
- // mostly for storing data in zookeeper
-
--syntax = "proto2";
- option java_package = "org.apache.hadoop.contrib.bkjournal";
- option java_outer_classname = "BKJournalProtos";
- option java_generate_equals_and_hash = true;
-@@ -46,4 +45,4 @@ message MaxTxIdProto {
- message CurrentInprogressProto {
- required string path = 1;
- optional string hostname = 2;
--}
-+}
-\ No newline at end of file
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-index e10a8861e61..b6ab75653a1 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "DatanodeLifelineProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-index 89b7c96efc7..0e4b2fb959c 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-@@ -24,7 +24,7 @@
-
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "DatanodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-index 6d45a935ee4..aa8b6be4532 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.server.namenode.ha.proto";
- option java_outer_classname = "HAZKInfoProtos";
- package hadoop.hdfs;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-index 266024e3696..4c84615a494 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-@@ -24,7 +24,7 @@
-
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
-
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "HdfsServerProtos";
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-index 47332a8817b..580f8d34730 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-@@ -24,7 +24,7 @@
-
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "InterDatanodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-index 35c401e33e5..3fd029b7362 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-@@ -24,7 +24,7 @@
-
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "JournalProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-index 201039890a4..8aa09d32ce4 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-@@ -24,7 +24,7 @@
-
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "NamenodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-index b33d99c39a9..d230166e908 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-@@ -21,7 +21,7 @@
- * Please see http://wiki.apache.org/hadoop/Compatibility
- * for what changes are allowed for a *stable* .proto interface.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
- option java_outer_classname = "QJournalProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-index 22fd7437bb8..f25fe591ad3 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "EditLogProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-index 2ea26cff940..5fb7c69a7fd 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-@@ -15,7 +15,7 @@
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.server.namenode";
- option java_outer_classname = "FsImageProto";
-
-diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
-index f52c0c9ae29..ad3daaf589e 100644
---- a/hadoop-project/pom.xml
-+++ b/hadoop-project/pom.xml
-@@ -1429,64 +1429,9 @@
- <artifactId>hadoop-maven-plugins</artifactId>
- <version>${project.version}</version>
- </plugin>
-- <plugin>
-- <groupId>org.xolstice.maven.plugins</groupId>
-- <artifactId>protobuf-maven-plugin</artifactId>
-- <version>${protobuf-maven-plugin.version}</version>
-- <extensions>true</extensions>
-- <dependencies>
-- <!-- for compiling with java7 -->
-- <dependency>
-- <groupId>com.google.guava</groupId>
-- <artifactId>guava</artifactId>
-- <version>20.0</version>
-- </dependency>
-- </dependencies>
-- <configuration>
-- <protocArtifact>
-- com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
-- </protocArtifact>
-- <attachProtoSources>false</attachProtoSources>
-- </configuration>
-- <executions>
-- <execution>
-- <id>src-compile-protoc</id>
-- <phase>generate-sources</phase>
-- <goals>
-- <goal>compile</goal>
-- </goals>
-- <configuration>
-- <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
-- <protoSourceRoot>${basedir}/src/main/proto</protoSourceRoot>
-- <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
-- <clearOutputDirectory>false</clearOutputDirectory>
-- <skip>true</skip>
-- </configuration>
-- </execution>
-- <execution>
-- <id>src-test-compile-protoc</id>
-- <phase>generate-test-sources</phase>
-- <goals>
-- <goal>test-compile</goal>
-- </goals>
-- <configuration>
-- <protoTestSourceRoot>${basedir}/src/test/proto</protoTestSourceRoot>
-- <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
-- <clearOutputDirectory>false</clearOutputDirectory>
-- <skip>true</skip>
-- </configuration>
-- </execution>
-- </executions>
-- </plugin>
- </plugins>
- </pluginManagement>
-- <extensions>
-- <extension>
-- <groupId>kr.motd.maven</groupId>
-- <artifactId>os-maven-plugin</artifactId>
-- <version>${os-maven-plugin.version}</version>
-- </extension>
-- </extensions>
-+
- <plugins>
- <plugin>
- <groupId>org.codehaus.mojo</groupId>
diff --git a/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff b/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff
new file mode 100644
index 0000000..51eca6a
--- /dev/null
+++ b/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff
@@ -0,0 +1,13 @@
+diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
+index ddebfa93a43..c356b1921cb 100644
+--- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
++++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
+@@ -729,6 +729,8 @@
+ <exclude>objenesis-license.txt</exclude>
+ <exclude>org/hamcrest/**/*.class</exclude>
+ <exclude>org/hamcrest/*.class</exclude>
++ <exclude>org/objenesis/**/*.class</exclude>
++ <exclude>org/objenesis/*.class</exclude>
+ </excludes>
+ </filter>
+ <!-- skip grizzly internals we don't need to run. -->
diff --git a/bigtop-packages/src/deb/hadoop/control b/bigtop-packages/src/deb/hadoop/control
index 503bfaa..943d787 100644
--- a/bigtop-packages/src/deb/hadoop/control
+++ b/bigtop-packages/src/deb/hadoop/control
@@ -195,14 +195,14 @@ Description: Development support for libhdfs0
Package: hadoop-httpfs
Architecture: any
-Depends: hadoop-hdfs (= ${binary:Version}), bigtop-tomcat
+Depends: hadoop-hdfs (= ${binary:Version})
Description: HTTPFS for Hadoop
The server providing HTTP REST API support for the complete FileSystem/FileContext
interface in HDFS.
Package: hadoop-kms
Architecture: any
-Depends: hadoop (= ${binary:Version}), bigtop-tomcat, adduser
+Depends: hadoop (= ${binary:Version}), adduser
Description: KMS for Hadoop
The server providing cryptographic key management based on Hadoop KeyProvider API.
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
index 7319c2d..01b1af6 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.dirs
@@ -1,3 +1,2 @@
-/etc/hadoop-httpfs
/usr/lib/hadoop-httpfs
/var/log/hadoop-httpfs
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
index a594bc0..eeb8db2 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.install
@@ -1,5 +1,3 @@
/etc/default/hadoop-httpfs
-/etc/hadoop-httpfs
-/usr/lib/hadoop/libexec/httpfs-config.sh
/usr/lib/hadoop-httpfs
/var/lib/hadoop-httpfs
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
index 9a03c4a..eeb5bcf 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.postinst
@@ -22,9 +22,6 @@ set -e
case "$1" in
configure)
chown httpfs:httpfs /var/run/hadoop-httpfs /var/log/hadoop-httpfs
- update-alternatives --install /etc/hadoop-httpfs/conf hadoop-httpfs-conf /etc/hadoop-httpfs/conf.empty 10
- update-alternatives --install /etc/hadoop-httpfs/tomcat-conf hadoop-httpfs-tomcat-conf /etc/hadoop-httpfs/tomcat-conf.dist 10
- update-alternatives --install /etc/hadoop-httpfs/tomcat-conf hadoop-httpfs-tomcat-conf /etc/hadoop-httpfs/tomcat-conf.https 5
;;
abort-upgrade|abort-remove|abort-deconfigure)
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm
index dd76992..3993ea9 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm
+++ b/bigtop-packages/src/deb/hadoop/hadoop-httpfs.prerm
@@ -35,8 +35,6 @@ set -e
case "$1" in
remove|upgrade|deconfigure)
- update-alternatives --remove hadoop-httpfs-conf /etc/hadoop-httpfs/conf.empty || :
- update-alternatives --remove-all hadoop-httpfs-tomcat-conf || :
;;
failed-upgrade)
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs b/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs
index 9abd9ee..df40a84 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs
+++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.dirs
@@ -1,3 +1,2 @@
-/etc/hadoop-kms
/usr/lib/hadoop-kms
/var/log/hadoop-kms
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.install b/bigtop-packages/src/deb/hadoop/hadoop-kms.install
index d221e07..63cdd3c 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-kms.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.install
@@ -1,5 +1,3 @@
/etc/default/hadoop-kms
-/etc/hadoop-kms
-/usr/lib/hadoop/libexec/kms-config.sh
/usr/lib/hadoop-kms
/var/lib/hadoop-kms
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst b/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst
index fa300e1..e379225 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst
+++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.postinst
@@ -23,9 +23,6 @@ case "$1" in
configure)
mkdir -p /var/log/hadoop-kms /var/run/hadoop-kms || :
chown kms:kms /var/lib/hadoop-kms /var/log/hadoop-kms /var/run/hadoop-kms
- update-alternatives --install /etc/hadoop-kms/conf hadoop-kms-conf /etc/hadoop-kms/conf.empty 10
- update-alternatives --install /etc/hadoop-kms/tomcat-conf hadoop-kms-tomcat-conf /etc/hadoop-kms/tomcat-conf.dist 10
- update-alternatives --install /etc/hadoop-kms/tomcat-conf hadoop-kms-tomcat-conf /etc/hadoop-kms/tomcat-conf.https 5
;;
abort-upgrade|abort-remove|abort-deconfigure)
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-kms.prerm b/bigtop-packages/src/deb/hadoop/hadoop-kms.prerm
index 8ff9afd..10df75a 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-kms.prerm
+++ b/bigtop-packages/src/deb/hadoop/hadoop-kms.prerm
@@ -35,8 +35,6 @@ set -e
case "$1" in
remove|upgrade|deconfigure)
- update-alternatives --remove hadoop-kms-conf /etc/hadoop-kms/conf.empty || :
- update-alternatives --remove-all hadoop-kms-tomcat-conf || :
;;
failed-upgrade)
diff --git a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
index 34a3796..ee971a9 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop-mapreduce.install
@@ -2,7 +2,6 @@
/etc/hadoop/conf.empty/mapred-site.xml
/etc/hadoop/conf.empty/mapred-env.sh
/etc/hadoop/conf.empty/mapred-queues.xml.template
-/etc/hadoop/conf.empty/mapred-site.xml.template
/usr/lib/hadoop-mapreduce
/usr/lib/hadoop/libexec/mapred-config.sh
/usr/bin/mapred
diff --git a/bigtop-packages/src/deb/hadoop/hadoop.install b/bigtop-packages/src/deb/hadoop/hadoop.install
index 2700cb6..28601a3 100644
--- a/bigtop-packages/src/deb/hadoop/hadoop.install
+++ b/bigtop-packages/src/deb/hadoop/hadoop.install
@@ -1,7 +1,6 @@
-/etc/hadoop/conf.empty/hadoop-metrics.properties
/etc/hadoop/conf.empty/hadoop-metrics2.properties
/etc/hadoop/conf.empty/log4j.properties
-/etc/hadoop/conf.empty/slaves
+/etc/hadoop/conf.empty/workers
/etc/hadoop/conf.empty/ssl-client.xml.example
/etc/hadoop/conf.empty/ssl-server.xml.example
/etc/hadoop/conf.empty/core-site.xml
@@ -13,6 +12,8 @@
/usr/lib/hadoop/etc
/usr/lib/hadoop/libexec/hadoop-config.sh
/usr/lib/hadoop/libexec/hadoop-layout.sh
+/usr/lib/hadoop/libexec/hadoop-functions.sh
+/usr/lib/hadoop/libexec/shellprofile.d
/usr/lib/hadoop/*.jar
/usr/lib/hadoop/lib
/usr/lib/hadoop/sbin
diff --git a/bigtop-packages/src/deb/hadoop/rules b/bigtop-packages/src/deb/hadoop/rules
index c587fcc..5a00588 100755
--- a/bigtop-packages/src/deb/hadoop/rules
+++ b/bigtop-packages/src/deb/hadoop/rules
@@ -58,9 +58,7 @@ override_dh_auto_install:
--distro-dir=debian \
--build-dir=${PWD}/build \
--httpfs-dir=debian/tmp/usr/lib/hadoop-httpfs \
- --httpfs-etc-dir=debian/tmp/etc/hadoop-httpfs \
--kms-dir=debian/tmp/usr/lib/hadoop-kms \
- --kms-etc-dir=debian/tmp/etc/hadoop-kms \
--system-lib-dir=debian/tmp/usr/lib/ \
--system-libexec-dir=debian/tmp/usr/lib/hadoop/libexec/ \
--system-include-dir=debian/tmp/usr/include \
diff --git a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
index 4c67900..10dff7c 100644
--- a/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
+++ b/bigtop-packages/src/rpm/hadoop/SPECS/hadoop.spec
@@ -24,14 +24,8 @@
%define hadoop_name hadoop
%define etc_hadoop /etc/%{name}
%define etc_yarn /etc/yarn
-%define etc_httpfs /etc/%{name}-httpfs
-%define etc_kms /etc/%{name}-kms
%define config_hadoop %{etc_hadoop}/conf
%define config_yarn %{etc_yarn}/conf
-%define config_httpfs %{etc_httpfs}/conf
-%define config_kms %{etc_kms}/conf
-%define tomcat_deployment_httpfs %{etc_httpfs}/tomcat-conf
-%define tomcat_deployment_kms %{etc_kms}/tomcat-conf
%define lib_hadoop_dirname /usr/lib
%define lib_hadoop %{lib_hadoop_dirname}/%{name}
%define lib_httpfs %{lib_hadoop_dirname}/%{name}-httpfs
@@ -175,14 +169,12 @@ Source21: yarn.default
Source22: hadoop-layout.sh
Source23: hadoop-hdfs-zkfc.svc
Source24: hadoop-hdfs-journalnode.svc
-Source25: httpfs-tomcat-deployment.sh
Source26: yarn.1
Source27: hdfs.1
Source28: mapred.1
Source29: hadoop-yarn-timelineserver.svc
Source30: hadoop-kms.svc
Source31: kms.default
-Source32: kms-tomcat-deployment.sh
#BIGTOP_PATCH_FILES
Buildroot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id} -u -n)
BuildRequires: fuse-devel, fuse, cmake
@@ -340,7 +332,7 @@ blocks of data over the network to Hadoop Distributed Filesystem
%package httpfs
Summary: HTTPFS for Hadoop
Group: System/Daemons
-Requires: %{name}-hdfs = %{version}-%{release}, bigtop-tomcat
+Requires: %{name}-hdfs = %{version}-%{release}
Requires(pre): %{name} = %{version}-%{release}
Requires(pre): %{name}-hdfs = %{version}-%{release}
@@ -351,7 +343,7 @@ interface in HDFS.
%package kms
Summary: KMS for Hadoop
Group: System/Daemons
-Requires: %{name}-client = %{version}-%{release}, bigtop-tomcat
+Requires: %{name}-client = %{version}-%{release}
Requires(pre): %{name} = %{version}-%{release}
%description kms
@@ -518,8 +510,6 @@ env HADOOP_VERSION=%{hadoop_base_version} bash %{SOURCE2} \
--system-lib-dir=$RPM_BUILD_ROOT%{_libdir} \
--system-libexec-dir=$RPM_BUILD_ROOT/%{lib_hadoop}/libexec \
--hadoop-etc-dir=$RPM_BUILD_ROOT%{etc_hadoop} \
- --httpfs-etc-dir=$RPM_BUILD_ROOT%{etc_httpfs} \
- --kms-etc-dir=$RPM_BUILD_ROOT%{etc_kms} \
--prefix=$RPM_BUILD_ROOT \
--doc-dir=$RPM_BUILD_ROOT%{doc_hadoop} \
--example-dir=$RPM_BUILD_ROOT%{doc_hadoop}/examples \
@@ -604,17 +594,9 @@ getent passwd mapred >/dev/null || /usr/sbin/useradd --comment "Hadoop MapReduce
%{alternatives_cmd} --install %{config_hadoop} %{name}-conf %{etc_hadoop}/conf.empty 10
%post httpfs
-%{alternatives_cmd} --install %{config_httpfs} %{name}-httpfs-conf %{etc_httpfs}/conf.empty 10
-%{alternatives_cmd} --install %{tomcat_deployment_httpfs} %{name}-httpfs-tomcat-conf %{etc_httpfs}/tomcat-conf.dist 10
-%{alternatives_cmd} --install %{tomcat_deployment_httpfs} %{name}-httpfs-tomcat-conf %{etc_httpfs}/tomcat-conf.https 5
-
chkconfig --add %{name}-httpfs
%post kms
-%{alternatives_cmd} --install %{config_kms} %{name}-kms-conf %{etc_kms}/conf.empty 10
-%{alternatives_cmd} --install %{tomcat_deployment_kms} %{name}-kms-tomcat-conf %{etc_kms}/tomcat-conf.dist 10
-%{alternatives_cmd} --install %{tomcat_deployment_kms} %{name}-kms-tomcat-conf %{etc_kms}/tomcat-conf.https 5
-
chkconfig --add %{name}-kms
%preun
@@ -626,9 +608,6 @@ fi
if [ $1 = 0 ]; then
service %{name}-httpfs stop > /dev/null 2>&1
chkconfig --del %{name}-httpfs
- %{alternatives_cmd} --remove %{name}-httpfs-conf %{etc_httpfs}/conf.empty || :
- %{alternatives_cmd} --remove %{name}-httpfs-tomcat-conf %{etc_httpfs}/tomcat-conf.dist || :
- %{alternatives_cmd} --remove %{name}-httpfs-tomcat-conf %{etc_httpfs}/tomcat-conf.https || :
fi
%postun httpfs
@@ -640,9 +619,6 @@ fi
if [ $1 = 0 ]; then
service %{name}-kms stop > /dev/null 2>&1
chkconfig --del %{name}-kms
- %{alternatives_cmd} --remove %{name}-kms-conf %{etc_kms}/conf.empty || :
- %{alternatives_cmd} --remove %{name}-kms-tomcat-conf %{etc_kms}/tomcat-conf.dist || :
- %{alternatives_cmd} --remove %{name}-kms-tomcat-conf %{etc_kms}/tomcat-conf.https || :
fi
%postun kms
@@ -686,7 +662,6 @@ fi
%config(noreplace) %{etc_hadoop}/conf.empty/mapred-site.xml
%config(noreplace) %{etc_hadoop}/conf.empty/mapred-env.sh
%config(noreplace) %{etc_hadoop}/conf.empty/mapred-queues.xml.template
-%config(noreplace) %{etc_hadoop}/conf.empty/mapred-site.xml.template
%config(noreplace) /etc/security/limits.d/mapreduce.conf
%{lib_mapreduce}
%{lib_hadoop}/libexec/mapred-config.sh
@@ -700,10 +675,9 @@ fi
%files
%defattr(-,root,root)
%config(noreplace) %{etc_hadoop}/conf.empty/core-site.xml
-%config(noreplace) %{etc_hadoop}/conf.empty/hadoop-metrics.properties
%config(noreplace) %{etc_hadoop}/conf.empty/hadoop-metrics2.properties
%config(noreplace) %{etc_hadoop}/conf.empty/log4j.properties
-%config(noreplace) %{etc_hadoop}/conf.empty/slaves
+%config(noreplace) %{etc_hadoop}/conf.empty/workers
%config(noreplace) %{etc_hadoop}/conf.empty/ssl-client.xml.example
%config(noreplace) %{etc_hadoop}/conf.empty/ssl-server.xml.example
%config(noreplace) %{etc_hadoop}/conf.empty/configuration.xsl
@@ -718,7 +692,8 @@ fi
%{lib_hadoop}/etc
%{lib_hadoop}/libexec/hadoop-config.sh
%{lib_hadoop}/libexec/hadoop-layout.sh
-%{lib_hadoop}/libexec/kms-config.sh
+%{lib_hadoop}/libexec/hadoop-functions.sh
+%{lib_hadoop}/libexec/shellprofile.d
%{bin_hadoop}/hadoop
%{man_hadoop}/man1/hadoop.1.*
%{man_hadoop}/man1/yarn.1.*
@@ -734,9 +709,7 @@ fi
%files httpfs
%defattr(-,root,root)
-%config(noreplace) %{etc_httpfs}
%config(noreplace) /etc/default/%{name}-httpfs
-%{lib_hadoop}/libexec/httpfs-config.sh
%{initd_dir}/%{name}-httpfs
%{lib_httpfs}
%attr(0775,httpfs,httpfs) %{run_httpfs}
@@ -745,9 +718,7 @@ fi
%files kms
%defattr(-,root,root)
-%config(noreplace) %{etc_kms}
%config(noreplace) /etc/default/%{name}-kms
-%{lib_hadoop}/libexec/kms-config.sh
%{initd_dir}/%{name}-kms
%{lib_kms}
%attr(0775,kms,kms) %{run_kms}
diff --git a/bigtop.bom b/bigtop.bom
index 8b809ee..2b5fc6d 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -148,7 +148,7 @@ bigtop {
'hadoop' {
name = 'hadoop'
relNotes = 'Apache Hadoop'
- version { base = '2.10.1'; pkg = base; release = 1 }
+ version { base = '3.2.1'; pkg = base; release = 1 }
tarball { destination = "${name}-${version.base}.tar.gz"
source = "${name}-${version.base}-src.tar.gz" }
url { download_path = "/$name/common/$name-${version.base}"
diff --git a/bigtop_toolchain/manifests/protobuf.pp b/bigtop_toolchain/manifests/protobuf.pp
index 4358687..c2c0eb1 100644
--- a/bigtop_toolchain/manifests/protobuf.pp
+++ b/bigtop_toolchain/manifests/protobuf.pp
@@ -35,7 +35,7 @@ class bigtop_toolchain::protobuf {
exec { "install protobuf":
cwd => "/usr/src/$protobuf8dir",
- command => "/usr/src/$protobuf8dir/configure --prefix=/usr/local --disable-shared && /usr/bin/make install",
+ command => "/usr/src/$protobuf8dir/configure --prefix=/usr/local --disable-shared --with-pic && /usr/bin/make install",
creates => "/usr/local/bin/protoc",
require => EXEC["download protobuf"],
timeout => 3000