Posted to commits@bigtop.apache.org by se...@apache.org on 2022/03/29 08:59:39 UTC

[bigtop] branch master updated: BIGTOP-3606. Bump Hadoop to 3.2.3. (#878)

This is an automated email from the ASF dual-hosted git repository.

sekikn pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/bigtop.git


The following commit(s) were added to refs/heads/master by this push:
     new dfeb8da  BIGTOP-3606. Bump Hadoop to 3.2.3. (#878)
dfeb8da is described below

commit dfeb8dae48d666c9b72f6a14b92bbb49eee76f87
Author: Masatake Iwasaki <iw...@apache.org>
AuthorDate: Tue Mar 29 17:59:33 2022 +0900

    BIGTOP-3606. Bump Hadoop to 3.2.3. (#878)
---
 .../src/common/hadoop/do-component-build           |   5 +-
 .../common/hadoop/patch0-revert-HADOOP-16598.diff  | 833 ---------------------
 .../src/common/hadoop/patch1-HADOOP-15939.diff     |  13 -
 .../src/common/hadoop/patch10-HADOOP-14922.diff    |  22 -
 .../src/common/hadoop/patch11-YARN-9783.diff       |  58 --
 .../patch2-exclude-spotbugs-annotations.diff       |  15 -
 ...atch4-revert-HADOOP-17119-and-HADOOP-16676.diff | 281 -------
 .../src/common/hadoop/patch8-HADOOP-17569.diff     |  36 -
 bigtop.bom                                         |   2 +-
 9 files changed, 5 insertions(+), 1260 deletions(-)

diff --git a/bigtop-packages/src/common/hadoop/do-component-build b/bigtop-packages/src/common/hadoop/do-component-build
index d0f202d..0a52d78 100644
--- a/bigtop-packages/src/common/hadoop/do-component-build
+++ b/bigtop-packages/src/common/hadoop/do-component-build
@@ -39,6 +39,8 @@ if [ $HOSTTYPE = "powerpc64le" ] ; then
         #cleanup
         rm -rf ${LEVELDBJNI_HOME}
         rm -rf ${LEVELDB_HOME}
+        mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=2.5.0 \
+            -Dclassifier=linux-ppcle_64 -Dpackaging=exe -Dfile=/usr/local/bin/protoc
 fi
 ## BIGTOP-2288
 
@@ -101,6 +103,8 @@ EOF
         #cleanup
         rm -rf ${LEVELDBJNI_HOME}
         rm -rf ${LEVELDB_HOME}
+        mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc -Dversion=2.5.0 \
+            -Dclassifier=linux-aarch_64 -Dpackaging=exe -Dfile=/usr/local/bin/protoc
 fi
 ## BIGTOP-3027
 
@@ -118,7 +122,6 @@ mkdir build/src
  
 # Build artifacts
 MAVEN_OPTS="-Dzookeeper.version=$ZOOKEEPER_VERSION "
-MAVEN_OPTS+="-Djetty.version=9.3.29.v20201019 "
 MAVEN_OPTS+="-DskipTests -DskipTest -DskipITs "
 
 # Include common Maven Deployment logic
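
The install-file invocations added above register the locally built protoc
2.5.0 binary in the local Maven repository under the platform classifiers
that os-maven-plugin detects on ppc64le and aarch64, where Maven Central
ships no protoc 2.5.0 executable; they match the
com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
coordinates visible in the hadoop-project POM hunk of the deleted revert
below. A minimal standalone sketch of the same mechanism (the resolution
check is a generic Maven idiom, not part of this commit; paths are
illustrative):

    # Register a local protoc binary as a Maven artifact with a platform classifier.
    mvn install:install-file -DgroupId=com.google.protobuf -DartifactId=protoc \
        -Dversion=2.5.0 -Dclassifier=linux-aarch_64 -Dpackaging=exe \
        -Dfile=/usr/local/bin/protoc
    # Confirm that the coordinates now resolve from the local repository.
    mvn dependency:get -Dartifact=com.google.protobuf:protoc:2.5.0:exe:linux-aarch_64
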
diff --git a/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff b/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff
deleted file mode 100644
index 3c3d828..0000000
--- a/bigtop-packages/src/common/hadoop/patch0-revert-HADOOP-16598.diff
+++ /dev/null
@@ -1,833 +0,0 @@
-commit 0296d8d024a2ade8d0dfc2c38e82f70052269339
-Author: Masatake Iwasaki <iw...@apache.org>
-Date:   Mon Jan 18 04:27:18 2021 +0000
-
-    Revert "HADOOP-16598. Backport "HADOOP-16558 [COMMON+HDFS] use protobuf-maven-plugin to generate protobuf classes" to all active branches"
-    
-    This reverts commit a37a4bc62ff64f965693b328974e9556c2044892.
-
-diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
-index fa4a838babb..0542ae83d77 100644
---- a/hadoop-common-project/hadoop-common/pom.xml
-+++ b/hadoop-common-project/hadoop-common/pom.xml
-@@ -360,20 +360,6 @@
-       </resource>
-     </resources>
-     <plugins>
--      <plugin>
--        <groupId>org.xolstice.maven.plugins</groupId>
--        <artifactId>protobuf-maven-plugin</artifactId>
--        <executions>
--          <execution>
--            <id>src-compile-protoc</id>
--            <configuration><skip>false</skip></configuration>
--          </execution>
--          <execution>
--            <id>src-test-compile-protoc</id>
--            <configuration><skip>false</skip></configuration>
--          </execution>
--        </executions>
--      </plugin>
-       <plugin>
-         <groupId>org.apache.hadoop</groupId>
-         <artifactId>hadoop-maven-plugins</artifactId>
-@@ -394,6 +380,58 @@
-               </source>
-             </configuration>
-           </execution>
-+          <execution>
-+            <id>compile-protoc</id>
-+            <goals>
-+              <goal>protoc</goal>
-+            </goals>
-+            <configuration>
-+              <protocVersion>${protobuf.version}</protocVersion>
-+              <protocCommand>${protoc.path}</protocCommand>
-+              <imports>
-+                <param>${basedir}/src/main/proto</param>
-+              </imports>
-+              <source>
-+                <directory>${basedir}/src/main/proto</directory>
-+                <includes>
-+                  <include>HAServiceProtocol.proto</include>
-+                  <include>IpcConnectionContext.proto</include>
-+                  <include>ProtocolInfo.proto</include>
-+                  <include>RpcHeader.proto</include>
-+                  <include>ZKFCProtocol.proto</include>
-+                  <include>ProtobufRpcEngine.proto</include>
-+                  <include>Security.proto</include>
-+                  <include>GetUserMappingsProtocol.proto</include>
-+                  <include>TraceAdmin.proto</include>
-+                  <include>RefreshAuthorizationPolicyProtocol.proto</include>
-+                  <include>RefreshUserMappingsProtocol.proto</include>
-+                  <include>RefreshCallQueueProtocol.proto</include>
-+                  <include>GenericRefreshProtocol.proto</include>
-+                  <include>FSProtos.proto</include>
-+                </includes>
-+              </source>
-+            </configuration>
-+          </execution>
-+          <execution>
-+            <id>compile-test-protoc</id>
-+            <goals>
-+              <goal>test-protoc</goal>
-+            </goals>
-+            <configuration>
-+              <protocVersion>${protobuf.version}</protocVersion>
-+              <protocCommand>${protoc.path}</protocCommand>
-+              <imports>
-+                <param>${basedir}/src/test/proto</param>
-+              </imports>
-+              <source>
-+                <directory>${basedir}/src/test/proto</directory>
-+                <includes>
-+                  <include>test.proto</include>
-+                  <include>test_rpc_service.proto</include>
-+                </includes>
-+              </source>
-+            </configuration>
-+          </execution>
-           <execution>
-             <id>resource-gz</id>
-             <phase>generate-resources</phase>
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto b/hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto
-index c895bce757b..c3b768ab67e 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/FSProtos.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.fs";
- option java_outer_classname = "FSProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-index 6296f88da69..fe465490b19 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/GenericRefreshProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.proto";
- option java_outer_classname = "GenericRefreshProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-index cb91a13b048..51552b879f3 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/GetUserMappingsProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.tools.proto";
- option java_outer_classname = "GetUserMappingsProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-index 5a88a7ff03f..16ee9a2e0a5 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ha.proto";
- option java_outer_classname = "HAServiceProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto b/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-index 16e2fb7c4db..4557e893cff 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "IpcConnectionContextProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-index fa113134027..a17e2078e94 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- /**
-  * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer
-  * to marshal the request and response in the RPC layer.
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto b/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-index 0e9d0d4baa4..fdbc440d91c 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "ProtocolInfoProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-index f57c6d63039..5ef1c2d0a8c 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-index 463b7c548fe..67ed1332510 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshCallQueueProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.proto";
- option java_outer_classname = "RefreshCallQueueProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-index a1130f5c2d9..41031ed9ea0 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RefreshUserMappingsProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "RefreshUserMappingsProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-index 4705b4276b8..e8d8cbbfe70 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "RpcHeaderProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-index 5177a86ef11..037a8781a9a 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/Security.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.security.proto";
- option java_outer_classname = "SecurityProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto b/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-index 8cf131bfb46..52d2a90abf4 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/TraceAdmin.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.tracing";
- option java_outer_classname = "TraceAdminPB";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto b/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-index 98bc05f4a36..a2b8dd10b30 100644
---- a/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-+++ b/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ha.proto";
- option java_outer_classname = "ZKFCProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test.proto b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-index 2c41aa2bc7c..37e9a0bf7aa 100644
---- a/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-+++ b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "TestProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-index f6990279140..0df67a0ea3e 100644
---- a/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-+++ b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
-@@ -15,7 +15,6 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
- option java_package = "org.apache.hadoop.ipc.protobuf";
- option java_outer_classname = "TestRpcServiceProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-index ffcf3624db0..bf915ba75fa 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
-@@ -136,18 +136,36 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
-         </configuration>
-       </plugin>
-       <plugin>
--        <groupId>org.xolstice.maven.plugins</groupId>
--        <artifactId>protobuf-maven-plugin</artifactId>
-+        <groupId>org.apache.hadoop</groupId>
-+        <artifactId>hadoop-maven-plugins</artifactId>
-         <executions>
-           <execution>
--            <id>src-compile-protoc</id>
-+            <id>compile-protoc</id>
-+            <goals>
-+              <goal>protoc</goal>
-+            </goals>
-             <configuration>
--              <skip>false</skip>
--              <additionalProtoPathElements>
--                <additionalProtoPathElement>
--                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
--                </additionalProtoPathElement>
--              </additionalProtoPathElements>
-+              <protocVersion>${protobuf.version}</protocVersion>
-+              <protocCommand>${protoc.path}</protocCommand>
-+              <imports>
-+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+                <param>${basedir}/src/main/proto</param>
-+              </imports>
-+              <source>
-+                <directory>${basedir}/src/main/proto</directory>
-+                <includes>
-+                  <include>ClientDatanodeProtocol.proto</include>
-+                  <include>ClientNamenodeProtocol.proto</include>
-+                  <include>acl.proto</include>
-+                  <include>xattr.proto</include>
-+                  <include>datatransfer.proto</include>
-+                  <include>hdfs.proto</include>
-+                  <include>encryption.proto</include>
-+                  <include>inotify.proto</include>
-+                  <include>erasurecoding.proto</include>
-+                  <include>ReconfigurationProtocol.proto</include>
-+                </includes>
-+              </source>
-             </configuration>
-           </execution>
-         </executions>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-index 84cd771da49..52f6330e0bd 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientDatanodeProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-index f353c033a50..8cf07a86cb9 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ClientNamenodeProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "ClientNamenodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-index bad9f45cf62..12a38b110fe 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/ReconfigurationProtocol.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax="proto2";
-+
-  // This file contains protocol buffers that are used to reconfigure NameNode
-  // and DataNode by HDFS admin.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-index e9a8ccb7569..c2529c90c32 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/acl.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "AclProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-index 66a69a9fcde..43a03e96e08 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/datatransfer.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used to transfer data
- // to and from the datanode, as well as between datanodes.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-index bcd82d63e05..75d3a0e2e44 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/encryption.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto
-index d92dd4cb84c..9f576237ef5 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/erasurecoding.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax="proto2";
-+ 
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "ErasureCodingProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-index 38459349ad4..8600a6fdc4b 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/hdfs.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-index afcccaa13bd..f1934082589 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/inotify.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax="proto2";
-+
- // This file contains protocol buffers used to communicate edits to clients
- // as part of the inotify system.
- 
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-index a53aa86c16d..6c8b5eb5943 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/proto/xattr.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax="proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "XAttrProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-index cc0dbf01e03..67c6831e2ad 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/pom.xml
-@@ -174,21 +174,29 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
-         </executions>
-       </plugin>
-       <plugin>
--        <groupId>org.xolstice.maven.plugins</groupId>
--        <artifactId>protobuf-maven-plugin</artifactId>
-+        <groupId>org.apache.hadoop</groupId>
-+        <artifactId>hadoop-maven-plugins</artifactId>
-         <executions>
-           <execution>
--            <id>src-compile-protoc</id>
-+            <id>compile-protoc</id>
-+            <goals>
-+              <goal>protoc</goal>
-+            </goals>
-             <configuration>
--              <skip>false</skip>
--              <additionalProtoPathElements>
--                <additionalProtoPathElement>
--                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
--                </additionalProtoPathElement>
--                <additionalProtoPathElement>
--                  ${basedir}/../hadoop-hdfs-client/src/main/proto
--                </additionalProtoPathElement>
--              </additionalProtoPathElements>
-+              <protocVersion>${protobuf.version}</protocVersion>
-+              <protocCommand>${protoc.path}</protocCommand>
-+              <imports>
-+                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+                <param>${basedir}/src/main/proto</param>
-+              </imports>
-+              <source>
-+                <directory>${basedir}/src/main/proto</directory>
-+                <includes>
-+                  <include>FederationProtocol.proto</include>
-+                  <include>RouterProtocol.proto</include>
-+                </includes>
-+              </source>
-             </configuration>
-           </execution>
-         </executions>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-index 1263630a223..b1a62b1c345 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/FederationProtocol.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.federation.protocol.proto";
- option java_outer_classname = "HdfsServerFederationProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-index 3eff0929775..f3a2b6e8abc 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/proto/RouterProtocol.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "RouterProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-index bc6dabc5294..fea414c8b53 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
-@@ -306,29 +306,41 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
-         </executions>
-       </plugin>
-       <plugin>
--        <groupId>org.xolstice.maven.plugins</groupId>
--        <artifactId>protobuf-maven-plugin</artifactId>
-+        <groupId>org.apache.hadoop</groupId>
-+        <artifactId>hadoop-maven-plugins</artifactId>
-         <executions>
-           <execution>
--            <id>src-compile-protoc</id>
-+            <id>compile-protoc</id>
-+            <goals>
-+              <goal>protoc</goal>
-+            </goals>
-             <configuration>
--              <skip>false</skip>
--              <additionalProtoPathElements>
--                <additionalProtoPathElement>
--                  ${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto
--                </additionalProtoPathElement>
--                <additionalProtoPathElement>
--                  ${basedir}/../hadoop-hdfs-client/src/main/proto
--                </additionalProtoPathElement>
--              </additionalProtoPathElements>
-+              <protocVersion>${protobuf.version}</protocVersion>
-+              <protocCommand>${protoc.path}</protocCommand>
-+              <imports>
-+                <param>${basedir}/../../hadoop-common-project/hadoop-common/src/main/proto</param>
-+                <param>${basedir}/../hadoop-hdfs-client/src/main/proto</param>
-+                <param>${basedir}/src/main/proto</param>
-+              </imports>
-+              <source>
-+                <directory>${basedir}/src/main/proto</directory>
-+                <includes>
-+                  <include>HdfsServer.proto</include>
-+                  <include>DatanodeProtocol.proto</include>
-+                  <include>DatanodeLifelineProtocol.proto</include>
-+                  <include>HAZKInfo.proto</include>
-+                  <include>InterDatanodeProtocol.proto</include>
-+                  <include>JournalProtocol.proto</include>
-+                  <include>NamenodeProtocol.proto</include>
-+                  <include>QJournalProtocol.proto</include>
-+                  <include>editlog.proto</include>
-+                  <include>fsimage.proto</include>
-+                  <include>AliasMapProtocol.proto</include>
-+                  <include>InterQJournalProtocol.proto</include>
-+                </includes>
-+              </source>
-             </configuration>
-           </execution>
--        </executions>
--      </plugin>
--      <plugin>
--        <groupId>org.apache.hadoop</groupId>
--        <artifactId>hadoop-maven-plugins</artifactId>
--        <executions>
-           <execution>
-             <id>resource-gz</id>
-             <phase>generate-resources</phase>
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
-index 8050f35454e..01dd9523bfb 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/AliasMapProtocol.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "AliasMapProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-index e10a8861e61..b6ab75653a1 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeLifelineProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "DatanodeLifelineProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-index 0e241301e08..4a8f9f00f22 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/DatanodeProtocol.proto
-@@ -24,7 +24,7 @@
- 
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "DatanodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-index 6d45a935ee4..aa8b6be4532 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HAZKInfo.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.server.namenode.ha.proto";
- option java_outer_classname = "HAZKInfoProtos";
- package hadoop.hdfs;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-index 78607efddab..85cfb6c0d2e 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/HdfsServer.proto
-@@ -24,7 +24,7 @@
- 
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- 
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "HdfsServerProtos";
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-index 47332a8817b..580f8d34730 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterDatanodeProtocol.proto
-@@ -24,7 +24,7 @@
- 
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "InterDatanodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto
-index e73ca23e92f..0ecdff1d372 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/InterQJournalProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
- option java_outer_classname = "InterQJournalProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-index 35c401e33e5..3fd029b7362 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/JournalProtocol.proto
-@@ -24,7 +24,7 @@
- 
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "JournalProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-index 97f5bcaf61f..89edfbf2ea6 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/NamenodeProtocol.proto
-@@ -24,7 +24,7 @@
- 
- // This file contains protocol buffers that are used throughout HDFS -- i.e.
- // by the client, server, and data transfer protocols.
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "NamenodeProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-index e366d1fb8d7..b4d2b312f99 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/QJournalProtocol.proto
-@@ -21,7 +21,7 @@
-  * Please see http://wiki.apache.org/hadoop/Compatibility
-  * for what changes are allowed for a *stable* .proto interface.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.qjournal.protocol";
- option java_outer_classname = "QJournalProtocolProtos";
- option java_generic_services = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-index 22fd7437bb8..f25fe591ad3 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/editlog.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.protocol.proto";
- option java_outer_classname = "EditLogProtos";
- option java_generate_equals_and_hash = true;
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-index 532aa6e2707..d943dbccd64 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/fsimage.proto
-@@ -15,7 +15,7 @@
-  * See the License for the specific language governing permissions and
-  * limitations under the License.
-  */
--syntax = "proto2";
-+
- option java_package = "org.apache.hadoop.hdfs.server.namenode";
- option java_outer_classname = "FsImageProto";
- 
-diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
-index e76347962b2..a5e7a8f4121 100644
---- a/hadoop-project/pom.xml
-+++ b/hadoop-project/pom.xml
-@@ -1649,56 +1649,9 @@
-           <artifactId>hadoop-maven-plugins</artifactId>
-           <version>${hadoop.version}</version>
-         </plugin>
--        <plugin>
--          <groupId>org.xolstice.maven.plugins</groupId>
--          <artifactId>protobuf-maven-plugin</artifactId>
--          <version>${protobuf-maven-plugin.version}</version>
--          <extensions>true</extensions>
--          <configuration>
--            <protocArtifact>
--              com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
--            </protocArtifact>
--            <attachProtoSources>false</attachProtoSources>
--          </configuration>
--          <executions>
--            <execution>
--              <id>src-compile-protoc</id>
--              <phase>generate-sources</phase>
--              <goals>
--                <goal>compile</goal>
--              </goals>
--              <configuration>
--                <includeDependenciesInDescriptorSet>false</includeDependenciesInDescriptorSet>
--                <protoSourceRoot>${basedir}/src/main/proto</protoSourceRoot>
--                <outputDirectory>${project.build.directory}/generated-sources/java</outputDirectory>
--                <clearOutputDirectory>false</clearOutputDirectory>
--                <skip>true</skip>
--              </configuration>
--            </execution>
--            <execution>
--              <id>src-test-compile-protoc</id>
--              <phase>generate-test-sources</phase>
--              <goals>
--                <goal>test-compile</goal>
--              </goals>
--              <configuration>
--                <protoTestSourceRoot>${basedir}/src/test/proto</protoTestSourceRoot>
--                <outputDirectory>${project.build.directory}/generated-test-sources/java</outputDirectory>
--                <clearOutputDirectory>false</clearOutputDirectory>
--                <skip>true</skip>
--              </configuration>
--            </execution>
--          </executions>
--        </plugin>
-       </plugins>
-     </pluginManagement>
--    <extensions>
--      <extension>
--        <groupId>kr.motd.maven</groupId>
--        <artifactId>os-maven-plugin</artifactId>
--        <version>${os-maven-plugin.version}</version>
--      </extension>
--    </extensions>
-+
-     <plugins>
-       <plugin>
-         <artifactId>maven-clean-plugin</artifactId>
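
The revert deleted above undid HADOOP-16598, which switched Hadoop to
protobuf-maven-plugin and added explicit syntax = "proto2"; headers; Bigtop
had carried it because the build uses protoc 2.5.0, whose parser predates
the syntax statement. A quick scratch illustration of that incompatibility
(hypothetical throwaway file, assumes protoc 2.5.0 on PATH):

    # Write a tiny proto2 file that declares its syntax explicitly.
    cd "$(mktemp -d)"
    printf 'syntax = "proto2";\nmessage Ping { optional int32 x = 1; }\n' > T.proto
    protoc -I. --java_out=. T.proto   # protoc 2.5.0 fails to parse the syntax line
    sed -i '/^syntax/d' T.proto       # drop it, as the deleted revert did
    protoc -I. --java_out=. T.proto   # now compiles
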
diff --git a/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff b/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff
deleted file mode 100644
index 51eca6a..0000000
--- a/bigtop-packages/src/common/hadoop/patch1-HADOOP-15939.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-index ddebfa93a43..c356b1921cb 100644
---- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-+++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-@@ -729,6 +729,8 @@
-                         <exclude>objenesis-license.txt</exclude>
-                         <exclude>org/hamcrest/**/*.class</exclude>
-                         <exclude>org/hamcrest/*.class</exclude>
-+                        <exclude>org/objenesis/**/*.class</exclude>
-+                        <exclude>org/objenesis/*.class</exclude>
-                       </excludes>
-                     </filter>
-                     <!-- skip grizzly internals we don't need to run. -->
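
The HADOOP-15939 patch quoted above filtered duplicate Objenesis classes out
of the shaded hadoop-client-minicluster jar. A generic way to check a built
jar for such leaks (the jar file name is illustrative, not from the commit):

    # List any Objenesis classes bundled into the shaded jar; with the shade
    # filter in effect, the fallback message is printed instead.
    jar tf hadoop-client-minicluster-3.2.3.jar | grep '^org/objenesis/' \
        || echo "no org.objenesis classes bundled"
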
diff --git a/bigtop-packages/src/common/hadoop/patch10-HADOOP-14922.diff b/bigtop-packages/src/common/hadoop/patch10-HADOOP-14922.diff
deleted file mode 100644
index a9e7b55..0000000
--- a/bigtop-packages/src/common/hadoop/patch10-HADOOP-14922.diff
+++ /dev/null
@@ -1,22 +0,0 @@
-diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
-index 3bf5f767ed2..000c4b91fd9 100644
---- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
-+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/primitives.h
-@@ -99,6 +99,8 @@ inline void simple_memcpy(void * dest, const void * src, size_t len) {
- inline uint32_t bswap(uint32_t val) {
- #ifdef __aarch64__
-   __asm__("rev %w[dst], %w[src]" : [dst]"=r"(val) : [src]"r"(val));
-+#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)
-+  return  __builtin_bswap32(val);
- #else
-   __asm__("bswap %0" : "=r" (val) : "0" (val));
- #endif
-@@ -108,6 +110,8 @@ inline uint32_t bswap(uint32_t val) {
- inline uint64_t bswap64(uint64_t val) {
- #ifdef __aarch64__
-   __asm__("rev %[dst], %[src]" : [dst]"=r"(val) : [src]"r"(val));
-+#elif defined(__ppc64__)||(__PPC64__)||(__powerpc64__)
-+  return __builtin_bswap64(val);
- #else
- #ifdef __X64
-   __asm__("bswapq %0" : "=r" (val) : "0" (val));
diff --git a/bigtop-packages/src/common/hadoop/patch11-YARN-9783.diff b/bigtop-packages/src/common/hadoop/patch11-YARN-9783.diff
deleted file mode 100644
index ac5fe42..0000000
--- a/bigtop-packages/src/common/hadoop/patch11-YARN-9783.diff
+++ /dev/null
@@ -1,58 +0,0 @@
-diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureRegistry.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureRegistry.java
-index 9d5848ea034..27d32ea9d26 100644
---- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureRegistry.java
-+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-registry/src/test/java/org/apache/hadoop/registry/secure/TestSecureRegistry.java
-@@ -24,16 +24,12 @@
- import org.apache.hadoop.registry.client.impl.zk.CuratorService;
- import org.apache.hadoop.registry.client.impl.zk.RegistrySecurity;
- import org.apache.zookeeper.CreateMode;
--import org.apache.zookeeper.Login;
--import org.apache.zookeeper.server.ZooKeeperSaslServer;
--import org.apache.zookeeper.server.auth.SaslServerCallbackHandler;
- import org.junit.After;
- import org.junit.Before;
- import org.junit.Test;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
- 
--import javax.security.auth.login.AppConfigurationEntry;
- import javax.security.auth.login.LoginContext;
- 
- import static org.apache.hadoop.registry.client.api.RegistryConstants.*;
-@@ -58,36 +54,6 @@ public void afterTestSecureZKService() throws Throwable {
-     RegistrySecurity.clearZKSaslClientProperties();
-   }
- 
--  /**
--  * this is a cut and paste of some of the ZK internal code that was
--   * failing on windows and swallowing its exceptions
--   */
--  @Test
--  public void testLowlevelZKSaslLogin() throws Throwable {
--    RegistrySecurity.bindZKToServerJAASContext(ZOOKEEPER_SERVER_CONTEXT);
--    String serverSection =
--        System.getProperty(ZooKeeperSaslServer.LOGIN_CONTEXT_NAME_KEY,
--            ZooKeeperSaslServer.DEFAULT_LOGIN_CONTEXT_NAME);
--    assertEquals(ZOOKEEPER_SERVER_CONTEXT, serverSection);
--
--    AppConfigurationEntry entries[];
--    entries = javax.security.auth.login.Configuration.getConfiguration()
--                                                     .getAppConfigurationEntry(
--                                                         serverSection);
--
--    assertNotNull("null entries", entries);
--
--    SaslServerCallbackHandler saslServerCallbackHandler =
--        new SaslServerCallbackHandler(
--            javax.security.auth.login.Configuration.getConfiguration());
--    Login login = new Login(serverSection, saslServerCallbackHandler);
--    try {
--      login.startThreadIfNeeded();
--    } finally {
--      login.shutdown();
--    }
--  }
--
-   @Test
-   public void testCreateSecureZK() throws Throwable {
-     startSecureZK();
diff --git a/bigtop-packages/src/common/hadoop/patch2-exclude-spotbugs-annotations.diff b/bigtop-packages/src/common/hadoop/patch2-exclude-spotbugs-annotations.diff
deleted file mode 100644
index 26a970b..0000000
--- a/bigtop-packages/src/common/hadoop/patch2-exclude-spotbugs-annotations.diff
+++ /dev/null
@@ -1,15 +0,0 @@
-diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-index 780ae0c4d63..d9eeba0aade 100644
---- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-+++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-@@ -322,6 +322,10 @@
-           <groupId>dnsjava</groupId>
-           <artifactId>dnsjava</artifactId>
-         </exclusion>
-+        <exclusion>
-+          <groupId>com.github.spotbugs</groupId>
-+          <artifactId>spotbugs-annotations</artifactId>
-+        </exclusion>
-       </exclusions>
-     </dependency>
-     <!-- Add optional runtime dependency on the in-development timeline server module
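
The patch quoted above excluded com.github.spotbugs:spotbugs-annotations
from a hadoop-client-minicluster dependency. A generic way to confirm such
an exclusion takes effect (run from the Hadoop source root; the invocation
is a standard Maven idiom, not part of this commit):

    # Show whether spotbugs-annotations still appears in the module's
    # resolved dependency tree; no matches under -Dincludes means it is gone.
    mvn -pl hadoop-client-modules/hadoop-client-minicluster dependency:tree \
        -Dincludes=com.github.spotbugs:spotbugs-annotations
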
diff --git a/bigtop-packages/src/common/hadoop/patch4-revert-HADOOP-17119-and-HADOOP-16676.diff b/bigtop-packages/src/common/hadoop/patch4-revert-HADOOP-17119-and-HADOOP-16676.diff
deleted file mode 100644
index 5dde1e4..0000000
--- a/bigtop-packages/src/common/hadoop/patch4-revert-HADOOP-17119-and-HADOOP-16676.diff
+++ /dev/null
@@ -1,281 +0,0 @@
-diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-index 780ae0c4d63..118df814bb1 100644
---- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-+++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
-@@ -779,19 +779,6 @@
-                         <exclude>ehcache-core.xsd</exclude>
-                       </excludes>
-                     </filter>
--                    <!-- Jetty 9.4.x: jetty-client and jetty-xml are depended by org.eclipse.jetty.websocket:websocket-client.-->
--                    <filter>
--                      <artifact>org.eclipse.jetty:jetty-client</artifact>
--                      <excludes>
--                        <exclude>*/**</exclude>
--                      </excludes>
--                    </filter>
--                    <filter>
--                      <artifact>org.eclipse.jetty:jetty-xml</artifact>
--                      <excludes>
--                        <exclude>*/**</exclude>
--                      </excludes>
--                    </filter>
-                   </filters>
- 
-                   <!-- relocate classes from mssql-jdbc -->
-@@ -920,13 +907,6 @@
-                         <exclude>**/pom.xml</exclude>
-                       </excludes>
-                     </relocation>
--                    <relocation>
--                      <pattern>javax/websocket/</pattern>
--                      <shadedPattern>${shaded.dependency.prefix}.javax.websocket.</shadedPattern>
--                      <excludes>
--                        <exclude>**/pom.xml</exclude>
--                      </excludes>
--                    </relocation>
-                     <relocation>
-                       <pattern>jersey/</pattern>
-                       <shadedPattern>${shaded.dependency.prefix}.jersey.</shadedPattern>
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
-index b2f18538b6c..05573a8de95 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpRequestLog.java
-@@ -24,8 +24,7 @@
- import org.apache.commons.logging.LogConfigurationException;
- import org.apache.commons.logging.LogFactory;
- import org.apache.log4j.Appender;
--import org.eclipse.jetty.server.AsyncRequestLogWriter;
--import org.eclipse.jetty.server.CustomRequestLog;
-+import org.eclipse.jetty.server.NCSARequestLog;
- import org.eclipse.jetty.server.RequestLog;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
-@@ -86,11 +85,10 @@ public static RequestLog getRequestLog(String name) {
-       if (appender instanceof HttpRequestLogAppender) {
-         HttpRequestLogAppender requestLogAppender
-           = (HttpRequestLogAppender)appender;
--        AsyncRequestLogWriter logWriter = new AsyncRequestLogWriter();
--        logWriter.setFilename(requestLogAppender.getFilename());
--        logWriter.setRetainDays(requestLogAppender.getRetainDays());
--        return new CustomRequestLog(logWriter,
--            CustomRequestLog.EXTENDED_NCSA_FORMAT);
-+        NCSARequestLog requestLog = new NCSARequestLog();
-+        requestLog.setFilename(requestLogAppender.getFilename());
-+        requestLog.setRetainDays(requestLogAppender.getRetainDays());
-+        return requestLog;
-       } else {
-         LOG.warn("Jetty request log for {} was of the wrong class", loggerName);
-         return null;
-diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
-index a126c1c71a8..5b9b71a4199 100644
---- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
-+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
-@@ -82,10 +82,12 @@
- import org.eclipse.jetty.server.SecureRequestCustomizer;
- import org.eclipse.jetty.server.Server;
- import org.eclipse.jetty.server.ServerConnector;
-+import org.eclipse.jetty.server.SessionManager;
- import org.eclipse.jetty.server.SslConnectionFactory;
- import org.eclipse.jetty.server.handler.ContextHandlerCollection;
- import org.eclipse.jetty.server.handler.HandlerCollection;
- import org.eclipse.jetty.server.handler.RequestLogHandler;
-+import org.eclipse.jetty.server.session.AbstractSessionManager;
- import org.eclipse.jetty.server.session.SessionHandler;
- import org.eclipse.jetty.servlet.DefaultServlet;
- import org.eclipse.jetty.servlet.FilterHolder;
-@@ -529,8 +531,7 @@ private ServerConnector createHttpsChannelConnector(
-           new SecureRequestCustomizer(sniHostCheckEnabled));
-       ServerConnector conn = createHttpChannelConnector(server, httpConfig);
- 
--      SslContextFactory.Server sslContextFactory =
--          new SslContextFactory.Server();
-+      SslContextFactory sslContextFactory = new SslContextFactory();
-       sslContextFactory.setNeedClientAuth(needsClientAuth);
-       sslContextFactory.setKeyManagerPassword(keyPassword);
-       if (keyStore != null) {
-@@ -598,9 +599,12 @@ private void initializeWebServer(String name, String hostName,
-       threadPool.setMaxThreads(maxThreads);
-     }
- 
--    SessionHandler handler = webAppContext.getSessionHandler();
--    handler.setHttpOnly(true);
--    handler.getSessionCookieConfig().setSecure(true);
-+    SessionManager sm = webAppContext.getSessionHandler().getSessionManager();
-+    if (sm instanceof AbstractSessionManager) {
-+      AbstractSessionManager asm = (AbstractSessionManager)sm;
-+      asm.setHttpOnly(true);
-+      asm.getSessionCookieConfig().setSecure(true);
-+    }
- 
-     ContextHandlerCollection contexts = new ContextHandlerCollection();
-     RequestLog requestLog = HttpRequestLog.getRequestLog(name);
-@@ -739,8 +743,12 @@ protected void addDefaultApps(ContextHandlerCollection parent,
-       }
-       logContext.setDisplayName("logs");
-       SessionHandler handler = new SessionHandler();
--      handler.setHttpOnly(true);
--      handler.getSessionCookieConfig().setSecure(true);
-+      SessionManager sm = handler.getSessionManager();
-+      if (sm instanceof AbstractSessionManager) {
-+        AbstractSessionManager asm = (AbstractSessionManager) sm;
-+        asm.setHttpOnly(true);
-+        asm.getSessionCookieConfig().setSecure(true);
-+      }
-       logContext.setSessionHandler(handler);
-       setContextAttributes(logContext, conf);
-       addNoCacheFilter(logContext);
-@@ -757,8 +765,12 @@ protected void addDefaultApps(ContextHandlerCollection parent,
-     params.put("org.eclipse.jetty.servlet.Default.dirAllowed", "false");
-     params.put("org.eclipse.jetty.servlet.Default.gzip", "true");
-     SessionHandler handler = new SessionHandler();
--    handler.setHttpOnly(true);
--    handler.getSessionCookieConfig().setSecure(true);
-+    SessionManager sm = handler.getSessionManager();
-+    if (sm instanceof AbstractSessionManager) {
-+      AbstractSessionManager asm = (AbstractSessionManager) sm;
-+      asm.setHttpOnly(true);
-+      asm.getSessionCookieConfig().setSecure(true);
-+    }
-     staticContext.setSessionHandler(handler);
-     setContextAttributes(staticContext, conf);
-     defaultContexts.put(staticContext, true);
-@@ -1226,7 +1238,7 @@ private static void bindListener(ServerConnector listener) throws Exception {
-    * @return
-    */
-   private static BindException constructBindException(ServerConnector listener,
--      IOException ex) {
-+      BindException ex) {
-     BindException be = new BindException("Port in use: "
-         + listener.getHost() + ":" + listener.getPort());
-     if (ex != null) {
-@@ -1248,7 +1260,7 @@ private void bindForSinglePort(ServerConnector listener, int port)
-       try {
-         bindListener(listener);
-         break;
--      } catch (IOException ex) {
-+      } catch (BindException ex) {
-         if (port == 0 || !findPort) {
-           throw constructBindException(listener, ex);
-         }
-@@ -1268,13 +1280,13 @@ private void bindForSinglePort(ServerConnector listener, int port)
-    */
-   private void bindForPortRange(ServerConnector listener, int startPort)
-       throws Exception {
--    IOException ioException = null;
-+    BindException bindException = null;
-     try {
-       bindListener(listener);
-       return;
--    } catch (IOException ex) {
-+    } catch (BindException ex) {
-       // Ignore exception.
--      ioException = ex;
-+      bindException = ex;
-     }
-     for(Integer port : portRanges) {
-       if (port == startPort) {
-@@ -1285,16 +1297,12 @@ private void bindForPortRange(ServerConnector listener, int startPort)
-       try {
-         bindListener(listener);
-         return;
--      } catch (IOException ex) {
--        if (!(ex instanceof BindException)
--            && !(ex.getCause() instanceof BindException)) {
--          throw ex;
--        }
-+      } catch (BindException ex) {
-         // Ignore exception. Move to next port.
--        ioException = ex;
-+        bindException = ex;
-       }
-     }
--    throw constructBindException(listener, ioException);
-+    throw constructBindException(listener, bindException);
-   }
- 
-   /**
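
The HttpServer2 hunks above roll back three distinct Jetty 9.4 API moves in one file: SessionManager and AbstractSessionManager were absorbed into SessionHandler, SslContextFactory gained a dedicated Server flavor for server-side TLS, and connector binding can now report a port clash as an IOException that merely wraps a BindException. A condensed sketch of the 9.4-side idioms kept by deleting this patch (class and method names are illustrative):

    import java.io.IOException;
    import java.net.BindException;

    import org.eclipse.jetty.server.ServerConnector;
    import org.eclipse.jetty.server.session.SessionHandler;
    import org.eclipse.jetty.util.ssl.SslContextFactory;

    public class Jetty94Idioms {

      // Jetty 9.4: cookie flags sit directly on SessionHandler; no
      // SessionManager lookup or AbstractSessionManager cast is needed.
      static SessionHandler secureSessionHandler() {
        SessionHandler handler = new SessionHandler();
        handler.setHttpOnly(true);
        handler.getSessionCookieConfig().setSecure(true);
        return handler;
      }

      // Jetty 9.4: server-side TLS settings go through the Server subclass.
      static SslContextFactory.Server serverSslFactory(boolean needsClientAuth) {
        SslContextFactory.Server factory = new SslContextFactory.Server();
        factory.setNeedClientAuth(needsClientAuth);
        return factory;
      }

      // Jetty 9.4: open() may throw an IOException whose cause is the real
      // BindException, so unwrap it before treating it as "port in use".
      static void tryBind(ServerConnector listener) throws IOException {
        try {
          listener.open();
        } catch (IOException ex) {
          if (ex instanceof BindException
              || ex.getCause() instanceof BindException) {
            throw new BindException("Port in use: "
                + listener.getHost() + ":" + listener.getPort());
          }
          throw ex; // a genuine I/O failure, not a port clash
        }
      }
    }
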
-diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
-index d0123e32039..212807f78ef 100644
---- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
-+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestHttpRequestLog.java
-@@ -18,7 +18,7 @@
- package org.apache.hadoop.http;
- 
- import org.apache.log4j.Logger;
--import org.eclipse.jetty.server.CustomRequestLog;
-+import org.eclipse.jetty.server.NCSARequestLog;
- import org.eclipse.jetty.server.RequestLog;
- import org.junit.Test;
- 
-@@ -42,7 +42,6 @@ public void testAppenderDefined() {
-     RequestLog requestLog = HttpRequestLog.getRequestLog("test");
-     Logger.getLogger("http.requests.test").removeAppender(requestLogAppender);
-     assertNotNull("RequestLog should not be null", requestLog);
--    assertEquals("Class mismatch",
--        CustomRequestLog.class, requestLog.getClass());
-+    assertEquals("Class mismatch", NCSARequestLog.class, requestLog.getClass());
-   }
- }
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
-index e4df1184356..1da3901b395 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/test/TestJettyHelper.java
-@@ -105,8 +105,7 @@ private Server createJettyServer() {
-       conn.setHost(host);
-       conn.setPort(port);
-       if (ssl) {
--        SslContextFactory.Server sslContextFactory =
--            new SslContextFactory.Server();
-+        SslContextFactory sslContextFactory = new SslContextFactory();
-         sslContextFactory.setNeedClientAuth(false);
-         sslContextFactory.setKeyStorePath(keyStore);
-         sslContextFactory.setKeyStoreType(keyStoreType);
-diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
-index fade4b4ee98..1c7850608ae 100644
---- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
-+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
-@@ -93,9 +93,8 @@
-   // set them to the minimum possible
-   private static final int HTTP_SELECTOR_THREADS = 1;
-   private static final int HTTP_ACCEPTOR_THREADS = 1;
--  // Jetty 9.4.x: Adding one more thread to HTTP_MAX_THREADS.
-   private static final int HTTP_MAX_THREADS =
--      HTTP_SELECTOR_THREADS + HTTP_ACCEPTOR_THREADS + 2;
-+      HTTP_SELECTOR_THREADS + HTTP_ACCEPTOR_THREADS + 1;
- 
-   public DatanodeHttpServer(final Configuration conf,
-       final DataNode datanode,
-diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
-index e76347962b2..c476e712aee 100644
---- a/hadoop-project/pom.xml
-+++ b/hadoop-project/pom.xml
-@@ -35,7 +35,7 @@
- 
-     <failIfNoTests>false</failIfNoTests>
-     <maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
--    <jetty.version>9.4.20.v20190813</jetty.version>
-+    <jetty.version>9.3.27.v20190418</jetty.version>
-     <test.exclude>_</test.exclude>
-     <test.exclude.pattern>_</test.exclude.pattern>
- 
-diff --git a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
-index 0a469ad66b7..9779193ad49 100644
---- a/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
-+++ b/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/SLSRunner.java
-@@ -93,6 +93,7 @@
- import org.apache.hadoop.yarn.util.UTCClock;
- import org.apache.hadoop.yarn.util.resource.ResourceUtils;
- import org.apache.hadoop.yarn.util.resource.Resources;
-+import org.eclipse.jetty.util.ConcurrentHashSet;
- import org.slf4j.Logger;
- import org.slf4j.LoggerFactory;
- 
-@@ -334,7 +335,7 @@ private void startNM() throws YarnException, IOException,
- 
-     // create NM simulators
-     Random random = new Random();
--    Set<String> rackSet = ConcurrentHashMap.newKeySet();
-+    Set<String> rackSet = new ConcurrentHashSet<>();
-     int threadPoolSize = Math.max(poolSize,
-         SLSConfiguration.RUNNER_POOL_SIZE_DEFAULT);
-     ExecutorService executorService = Executors.
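
The SLSRunner hunk is the same migration in miniature: Jetty 9.4 removed org.eclipse.jetty.util.ConcurrentHashSet, and the JDK's own ConcurrentHashMap.newKeySet() (Java 8+) is the drop-in replacement kept by deleting this patch. A self-contained example (class name and rack value are illustrative):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    public class RackSetSketch {
      public static void main(String[] args) {
        // A concurrent Set view backed by ConcurrentHashMap, replacing
        // Jetty's removed ConcurrentHashSet.
        Set<String> rackSet = ConcurrentHashMap.newKeySet();
        rackSet.add("/default-rack");
        System.out.println(rackSet); // prints [/default-rack]
      }
    }
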
diff --git a/bigtop-packages/src/common/hadoop/patch8-HADOOP-17569.diff b/bigtop-packages/src/common/hadoop/patch8-HADOOP-17569.diff
deleted file mode 100644
index 0506e45..0000000
--- a/bigtop-packages/src/common/hadoop/patch8-HADOOP-17569.diff
+++ /dev/null
@@ -1,36 +0,0 @@
-commit 1a83235fb0f32aca47be9dcd5c81da45f6c047dc
-Author: Masatake Iwasaki <iw...@apache.org>
-Date:   Thu Apr 15 21:33:18 2021 +0900
-
-    HADOOP-17569. Building native code fails on Fedora 33. (#2886)
-    
-    (cherry picked from commit 2717203f858ff654de0fc01cfb9afef2e705e33c)
-
-diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/exception.c b/hadoop-common-project/hadoop-common/src/main/native/src/exception.c
-index fc072e8002b..a25cc3d3b7e 100644
---- a/hadoop-common-project/hadoop-common/src/main/native/src/exception.c
-+++ b/hadoop-common-project/hadoop-common/src/main/native/src/exception.c
-@@ -111,8 +111,8 @@ jthrowable newIOException(JNIEnv* env, const char *fmt, ...)
- const char* terror(int errnum)
- {
- 
--#if defined(__sun)
--// MT-Safe under Solaris which doesn't support sys_errlist/sys_nerr
-+#if defined(__sun) || defined(__GLIBC_PREREQ) && __GLIBC_PREREQ(2, 32)
-+// MT-Safe under Solaris or glibc >= 2.32 not supporting sys_errlist/sys_nerr
-   return strerror(errnum); 
- #else
-   if ((errnum < 0) || (errnum >= sys_nerr)) {
-diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
-index cb8fe378515..18396c78554 100644
---- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
-+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
-@@ -65,6 +65,8 @@ if(WIN32)
-     set(OUT_DIR bin)
- else()
-     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fvisibility=hidden")
-+    # using old default behavior on GCC >= 10.0
-+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fcommon")
-     set(OS_DIR ${CMAKE_SOURCE_DIR}/main/native/libhdfs/os/posix)
- 
-     # IMPORTANT: OUT_DIR MUST be relative to maven's
diff --git a/bigtop.bom b/bigtop.bom
index 5fd845e..271f9ef 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -145,7 +145,7 @@ bigtop {
     'hadoop' {
       name    = 'hadoop'
       relNotes = 'Apache Hadoop'
-      version { base = '3.2.2'; pkg = base; release = 1 }
+      version { base = '3.2.3'; pkg = base; release = 1 }
       tarball { destination = "${name}-${version.base}.tar.gz"
                 source      = "${name}-${version.base}-src.tar.gz" }
       url     { download_path = "/$name/common/$name-${version.base}"