Posted to commits@solr.apache.org by kr...@apache.org on 2022/03/05 19:01:18 UTC

[solr] branch main updated: SOLR-16039: Upgrade to Hadoop 3.3.2 (#682)

This is an automated email from the ASF dual-hosted git repository.

krisden pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/solr.git


The following commit(s) were added to refs/heads/main by this push:
     new 62616d7  SOLR-16039: Upgrade to Hadoop 3.3.2 (#682)
62616d7 is described below

commit 62616d7284d94f38eeb29d4e016deeb2f1829649
Author: Kevin Risden <ri...@users.noreply.github.com>
AuthorDate: Sat Mar 5 14:01:11 2022 -0500

    SOLR-16039: Upgrade to Hadoop 3.3.2 (#682)
---
 gradle/globals.gradle                              |   2 +-
 .../randomization/policies/solr-tests.policy       |   3 -
 solr/CHANGES.txt                                   |   2 +
 solr/licenses/hadoop-annotations-3.3.1.jar.sha1    |   1 -
 solr/licenses/hadoop-annotations-3.3.2.jar.sha1    |   1 +
 solr/licenses/hadoop-auth-3.3.1.jar.sha1           |   1 -
 solr/licenses/hadoop-auth-3.3.2.jar.sha1           |   1 +
 solr/licenses/hadoop-client-api-3.3.1.jar.sha1     |   1 -
 solr/licenses/hadoop-client-api-3.3.2.jar.sha1     |   1 +
 .../hadoop-client-minicluster-3.3.1.jar.sha1       |   1 -
 .../hadoop-client-minicluster-3.3.2.jar.sha1       |   1 +
 solr/licenses/hadoop-client-runtime-3.3.1.jar.sha1 |   1 -
 solr/licenses/hadoop-client-runtime-3.3.2.jar.sha1 |   1 +
 solr/licenses/hadoop-common-3.3.1.jar.sha1         |   1 -
 solr/licenses/hadoop-common-3.3.2.jar.sha1         |   1 +
 solr/licenses/hadoop-hdfs-3.3.1-tests.jar.sha1     |   1 -
 solr/licenses/hadoop-hdfs-3.3.1.jar.sha1           |   1 -
 solr/licenses/hadoop-hdfs-3.3.2-tests.jar.sha1     |   1 +
 solr/licenses/hadoop-hdfs-3.3.2.jar.sha1           |   1 +
 solr/licenses/hadoop-minikdc-3.3.1.jar.sha1        |   1 -
 solr/licenses/hadoop-minikdc-3.3.2.jar.sha1        |   1 +
 .../htrace-core4-4.1.0-incubating.jar.sha1         |   1 -
 solr/licenses/htrace-core4-LICENSE-ASL.txt         | 182 ---------------------
 solr/licenses/htrace-core4-NOTICE.txt              |  18 --
 solr/licenses/junit-4.13.1.jar.sha1                |   1 -
 solr/licenses/junit-4.13.2.jar.sha1                |   1 +
 solr/licenses/snappy-java-1.1.7.6.jar.sha1         |   1 -
 solr/licenses/snappy-java-1.1.8.2.jar.sha1         |   1 +
 solr/modules/hadoop-auth/build.gradle              |   1 -
 solr/modules/hdfs/build.gradle                     |   1 +
 .../src/test/org/apache/hadoop/fs/FileUtil.java    | 119 ++++++++------
 .../src/test/org/apache/hadoop/fs/HardLink.java    |   4 +-
 .../org/apache/hadoop/fs/RawLocalFileSystem.java   |  36 ++--
 .../datanode/fsdataset/impl/BlockPoolSlice.java    |  28 ++--
 .../test/org/apache/hadoop/util/DiskChecker.java   |  12 +-
 solr/server/etc/security.policy                    |   3 -
 versions.lock                                      |  21 ++-
 versions.props                                     |   5 +-
 38 files changed, 137 insertions(+), 323 deletions(-)

diff --git a/gradle/globals.gradle b/gradle/globals.gradle
index cdd2689..a854304 100644
--- a/gradle/globals.gradle
+++ b/gradle/globals.gradle
@@ -152,4 +152,4 @@ allprojects {
       return taskList
     }
   }
-}
\ No newline at end of file
+}
diff --git a/gradle/testing/randomization/policies/solr-tests.policy b/gradle/testing/randomization/policies/solr-tests.policy
index 7232474..2d699cd 100644
--- a/gradle/testing/randomization/policies/solr-tests.policy
+++ b/gradle/testing/randomization/policies/solr-tests.policy
@@ -107,9 +107,6 @@ grant {
   permission java.lang.management.ManagementPermission "control";
   permission java.lang.management.ManagementPermission "monitor";
 
-  // needed by hadoop htrace
-  permission java.net.NetPermission "getNetworkInformation";
-
   // needed by DIH - possibly even after DIH is a package
   permission java.sql.SQLPermission "deregisterDriver";
 
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 262d44c..f5dfa82 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -59,6 +59,8 @@ Other Changes
 
 * SOLR-15886: Remove deprecated showItems configuration value from solrconfig.xml files (Andy Lester via Eric Pugh)
 
+* SOLR-16039: Upgrade to Hadoop 3.3.2 (Kevin Risden)
+
 Build
 ---------------------
 * SOLR-16053: Upgrade scriptDepVersions (Kevin Risden)
diff --git a/solr/licenses/hadoop-annotations-3.3.1.jar.sha1 b/solr/licenses/hadoop-annotations-3.3.1.jar.sha1
deleted file mode 100644
index 2fc67fe..0000000
--- a/solr/licenses/hadoop-annotations-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-6d70d2a8682f650f0dd6e66945448a2e2222a44a
diff --git a/solr/licenses/hadoop-annotations-3.3.2.jar.sha1 b/solr/licenses/hadoop-annotations-3.3.2.jar.sha1
new file mode 100644
index 0000000..d5a4186
--- /dev/null
+++ b/solr/licenses/hadoop-annotations-3.3.2.jar.sha1
@@ -0,0 +1 @@
+2b47cd39c02c873b6bb29193962735d1d56f6572
diff --git a/solr/licenses/hadoop-auth-3.3.1.jar.sha1 b/solr/licenses/hadoop-auth-3.3.1.jar.sha1
deleted file mode 100644
index c3bcba3..0000000
--- a/solr/licenses/hadoop-auth-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-862a3da7b7c75cca2e0ffe48f1f449d0897245de
diff --git a/solr/licenses/hadoop-auth-3.3.2.jar.sha1 b/solr/licenses/hadoop-auth-3.3.2.jar.sha1
new file mode 100644
index 0000000..889352f
--- /dev/null
+++ b/solr/licenses/hadoop-auth-3.3.2.jar.sha1
@@ -0,0 +1 @@
+32b81a77ea6ffcbf524d1cda8ab20ea3522e6fd5
diff --git a/solr/licenses/hadoop-client-api-3.3.1.jar.sha1 b/solr/licenses/hadoop-client-api-3.3.1.jar.sha1
deleted file mode 100644
index 26531ad..0000000
--- a/solr/licenses/hadoop-client-api-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4b9c9cdd9967495838fb521001699c4c9dddf183
diff --git a/solr/licenses/hadoop-client-api-3.3.2.jar.sha1 b/solr/licenses/hadoop-client-api-3.3.2.jar.sha1
new file mode 100644
index 0000000..9c2cc21
--- /dev/null
+++ b/solr/licenses/hadoop-client-api-3.3.2.jar.sha1
@@ -0,0 +1 @@
+48f1af0a3a0270095dc59dc9f7d698969de4b4bf
diff --git a/solr/licenses/hadoop-client-minicluster-3.3.1.jar.sha1 b/solr/licenses/hadoop-client-minicluster-3.3.1.jar.sha1
deleted file mode 100644
index e487160..0000000
--- a/solr/licenses/hadoop-client-minicluster-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-545968c946585792b3d5bb4ed379a7c114fc7051
diff --git a/solr/licenses/hadoop-client-minicluster-3.3.2.jar.sha1 b/solr/licenses/hadoop-client-minicluster-3.3.2.jar.sha1
new file mode 100644
index 0000000..379ff4a
--- /dev/null
+++ b/solr/licenses/hadoop-client-minicluster-3.3.2.jar.sha1
@@ -0,0 +1 @@
+c08ddd065de27d21c2c2b398084092377f16a06b
diff --git a/solr/licenses/hadoop-client-runtime-3.3.1.jar.sha1 b/solr/licenses/hadoop-client-runtime-3.3.1.jar.sha1
deleted file mode 100644
index 58dbafa..0000000
--- a/solr/licenses/hadoop-client-runtime-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f3a55d882328ee87a1054f99d62ba987fa9029a4
diff --git a/solr/licenses/hadoop-client-runtime-3.3.2.jar.sha1 b/solr/licenses/hadoop-client-runtime-3.3.2.jar.sha1
new file mode 100644
index 0000000..bd6ffdb
--- /dev/null
+++ b/solr/licenses/hadoop-client-runtime-3.3.2.jar.sha1
@@ -0,0 +1 @@
+0112f2b7420fa77c62148799175c073594197e6c
diff --git a/solr/licenses/hadoop-common-3.3.1.jar.sha1 b/solr/licenses/hadoop-common-3.3.1.jar.sha1
deleted file mode 100644
index 541d526..0000000
--- a/solr/licenses/hadoop-common-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-227027e98079d3f0f24c56f323fe27a129658073
diff --git a/solr/licenses/hadoop-common-3.3.2.jar.sha1 b/solr/licenses/hadoop-common-3.3.2.jar.sha1
new file mode 100644
index 0000000..a97b8aa
--- /dev/null
+++ b/solr/licenses/hadoop-common-3.3.2.jar.sha1
@@ -0,0 +1 @@
+edec4cdc7f1b1208f7d135f9f228ba44b83cf58f
diff --git a/solr/licenses/hadoop-hdfs-3.3.1-tests.jar.sha1 b/solr/licenses/hadoop-hdfs-3.3.1-tests.jar.sha1
deleted file mode 100644
index 95b2946..0000000
--- a/solr/licenses/hadoop-hdfs-3.3.1-tests.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e37d582c2cf6320b0422a7ea3b5442a4fa0b3b3
diff --git a/solr/licenses/hadoop-hdfs-3.3.1.jar.sha1 b/solr/licenses/hadoop-hdfs-3.3.1.jar.sha1
deleted file mode 100644
index 3eb461a..0000000
--- a/solr/licenses/hadoop-hdfs-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5da7f270cb6564e099e0d2d424285a24fca62bd2
diff --git a/solr/licenses/hadoop-hdfs-3.3.2-tests.jar.sha1 b/solr/licenses/hadoop-hdfs-3.3.2-tests.jar.sha1
new file mode 100644
index 0000000..a3abe8e
--- /dev/null
+++ b/solr/licenses/hadoop-hdfs-3.3.2-tests.jar.sha1
@@ -0,0 +1 @@
+20b47a6fe7780ae0c250081d2b9f30cd07691b6c
diff --git a/solr/licenses/hadoop-hdfs-3.3.2.jar.sha1 b/solr/licenses/hadoop-hdfs-3.3.2.jar.sha1
new file mode 100644
index 0000000..7f48dfd
--- /dev/null
+++ b/solr/licenses/hadoop-hdfs-3.3.2.jar.sha1
@@ -0,0 +1 @@
+aed57238fd4e669043bcc29d6bea3f0e6420950d
diff --git a/solr/licenses/hadoop-minikdc-3.3.1.jar.sha1 b/solr/licenses/hadoop-minikdc-3.3.1.jar.sha1
deleted file mode 100644
index 1aed63c..0000000
--- a/solr/licenses/hadoop-minikdc-3.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-353cb158ac0fc045e6f92045170f20288f0ca5dc
diff --git a/solr/licenses/hadoop-minikdc-3.3.2.jar.sha1 b/solr/licenses/hadoop-minikdc-3.3.2.jar.sha1
new file mode 100644
index 0000000..7e62097
--- /dev/null
+++ b/solr/licenses/hadoop-minikdc-3.3.2.jar.sha1
@@ -0,0 +1 @@
+749d44e9fe84566b4daa8898c62d8e88fd8e016f
diff --git a/solr/licenses/htrace-core4-4.1.0-incubating.jar.sha1 b/solr/licenses/htrace-core4-4.1.0-incubating.jar.sha1
deleted file mode 100644
index 7162ab7..0000000
--- a/solr/licenses/htrace-core4-4.1.0-incubating.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-12b3e2adda95e8c41d9d45d33db075137871d2e2
diff --git a/solr/licenses/htrace-core4-LICENSE-ASL.txt b/solr/licenses/htrace-core4-LICENSE-ASL.txt
deleted file mode 100644
index 2c41ec8..0000000
--- a/solr/licenses/htrace-core4-LICENSE-ASL.txt
+++ /dev/null
@@ -1,182 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-   
-This project contains annotations derived from JCIP-ANNOTATIONS
-Copyright (c) 2005 Brian Goetz and Tim Peierls.
-See http://www.jcip.net and the Creative Commons Attribution License 
-(http://creativecommons.org/licenses/by/2.5)
-
diff --git a/solr/licenses/htrace-core4-NOTICE.txt b/solr/licenses/htrace-core4-NOTICE.txt
deleted file mode 100644
index 19f97eb..0000000
--- a/solr/licenses/htrace-core4-NOTICE.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
-
-In addition, this product includes software developed by:
-
-JUnit (http://www.junit.org/) included under the Common Public License v1.0.  See
-the full text here: http://junit.sourceforge.net/cpl-v10.html
-
-levigo, a go wrapper for leveldb, is copyright Jeffrey M Hodges and
-is MIT licensed: https://github.com/jmhodges/levigo/blob/master/LICENSE
-
-Units, unit multipliers and functions for go, has license
-(TBD https://github.com/alecthomas/units/issues/1).
-It is by alecthomas: https://github.com/alecthomas/units
-
-Kingpin, a go command line and flag parser is licensed MIT
-(https://github.com/alecthomas/kingpin/blob/master/COPYING)
-by alecthomas
diff --git a/solr/licenses/junit-4.13.1.jar.sha1 b/solr/licenses/junit-4.13.1.jar.sha1
deleted file mode 100644
index fc8ae23..0000000
--- a/solr/licenses/junit-4.13.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-cdd00374f1fee76b11e2a9d127405aa3f6be5b6a
diff --git a/solr/licenses/junit-4.13.2.jar.sha1 b/solr/licenses/junit-4.13.2.jar.sha1
new file mode 100644
index 0000000..d0aeec1
--- /dev/null
+++ b/solr/licenses/junit-4.13.2.jar.sha1
@@ -0,0 +1 @@
+8ac9e16d933b6fb43bc7f576336b8f4d7eb5ba12
diff --git a/solr/licenses/snappy-java-1.1.7.6.jar.sha1 b/solr/licenses/snappy-java-1.1.7.6.jar.sha1
deleted file mode 100644
index 1766919..0000000
--- a/solr/licenses/snappy-java-1.1.7.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2900879ed8049a19b0f0f30ecd00a84e5a2b80c0
diff --git a/solr/licenses/snappy-java-1.1.8.2.jar.sha1 b/solr/licenses/snappy-java-1.1.8.2.jar.sha1
new file mode 100644
index 0000000..050c871
--- /dev/null
+++ b/solr/licenses/snappy-java-1.1.8.2.jar.sha1
@@ -0,0 +1 @@
+4205e3cf9c44264731ad002fcd2520eb1b2bb801
diff --git a/solr/modules/hadoop-auth/build.gradle b/solr/modules/hadoop-auth/build.gradle
index 069172c..8aab68c 100644
--- a/solr/modules/hadoop-auth/build.gradle
+++ b/solr/modules/hadoop-auth/build.gradle
@@ -70,7 +70,6 @@ dependencies {
   runtimeOnly 'commons-collections:commons-collections'
   runtimeOnly 'com.google.re2j:re2j'
   runtimeOnly 'org.apache.commons:commons-configuration2'
-  runtimeOnly 'org.apache.htrace:htrace-core4' // note: removed in Hadoop 3.3.2
   runtimeOnly 'org.apache.kerby:kerb-core'
   runtimeOnly 'org.apache.kerby:kerb-util'
 
diff --git a/solr/modules/hdfs/build.gradle b/solr/modules/hdfs/build.gradle
index f2f4544..607ccc9 100644
--- a/solr/modules/hdfs/build.gradle
+++ b/solr/modules/hdfs/build.gradle
@@ -46,6 +46,7 @@ dependencies {
   // hadoop dependencies for tests
   testImplementation ('org.apache.hadoop:hadoop-hdfs') { transitive = false }
   testImplementation ('org.apache.hadoop:hadoop-hdfs::tests') { transitive = false }
+  testImplementation 'org.apache.hadoop.thirdparty:hadoop-shaded-guava'
   testRuntimeOnly 'org.apache.hadoop:hadoop-client-minicluster'
 
   testImplementation 'org.slf4j:jcl-over-slf4j'
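
The hadoop-shaded-guava test dependency added above pairs with the import
changes further down in this commit: Hadoop 3.3.x relocates its bundled Guava
classes under org.apache.hadoop.thirdparty, so the forked test sources switch
from com.google.common imports to the shaded coordinates. A minimal sketch of
code compiling against the relocated annotation (the class and helper method
here are hypothetical):

    import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;

    public class ShadedGuavaSketch {
      // Same Guava annotation, relocated into Hadoop's shaded namespace.
      @VisibleForTesting
      static String normalize(String s) {  // hypothetical helper
        return s == null ? "" : s.trim();
      }
    }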
diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java
index 01df4d4..677114d 100644
--- a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java
+++ b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/FileUtil.java
@@ -315,9 +315,9 @@ public class FileUtil {
   // generate exception
   //
   private static void checkDependencies(FileSystem srcFS,
-                                        Path src,
-                                        FileSystem dstFS,
-                                        Path dst)
+      Path src,
+      FileSystem dstFS,
+      Path dst)
       throws IOException {
     if (srcFS == dstFS) {
       String srcq = srcFS.makeQualified(src).toString() + Path.SEPARATOR;
@@ -335,16 +335,16 @@ public class FileUtil {
 
   /** Copy files between FileSystems. */
   public static boolean copy(FileSystem srcFS, Path src,
-                             FileSystem dstFS, Path dst,
-                             boolean deleteSource,
-                             Configuration conf) throws IOException {
+      FileSystem dstFS, Path dst,
+      boolean deleteSource,
+      Configuration conf) throws IOException {
     return copy(srcFS, src, dstFS, dst, deleteSource, true, conf);
   }
 
   public static boolean copy(FileSystem srcFS, Path[] srcs,
-                             FileSystem dstFS, Path dst,
-                             boolean deleteSource,
-                             boolean overwrite, Configuration conf)
+      FileSystem dstFS, Path dst,
+      boolean deleteSource,
+      boolean overwrite, Configuration conf)
       throws IOException {
     boolean gotException = false;
     boolean returnVal = true;
@@ -371,8 +371,8 @@ public class FileUtil {
           returnVal = false;
       } catch (IOException e) {
         gotException = true;
-        exceptions.append(e.getMessage());
-        exceptions.append("\n");
+        exceptions.append(e.getMessage())
+            .append("\n");
       }
     }
     if (gotException) {
@@ -383,20 +383,20 @@ public class FileUtil {
 
   /** Copy files between FileSystems. */
   public static boolean copy(FileSystem srcFS, Path src,
-                             FileSystem dstFS, Path dst,
-                             boolean deleteSource,
-                             boolean overwrite,
-                             Configuration conf) throws IOException {
+      FileSystem dstFS, Path dst,
+      boolean deleteSource,
+      boolean overwrite,
+      Configuration conf) throws IOException {
     FileStatus fileStatus = srcFS.getFileStatus(src);
     return copy(srcFS, fileStatus, dstFS, dst, deleteSource, overwrite, conf);
   }
 
   /** Copy files between FileSystems. */
   public static boolean copy(FileSystem srcFS, FileStatus srcStatus,
-                             FileSystem dstFS, Path dst,
-                             boolean deleteSource,
-                             boolean overwrite,
-                             Configuration conf) throws IOException {
+      FileSystem dstFS, Path dst,
+      boolean deleteSource,
+      boolean overwrite,
+      Configuration conf) throws IOException {
     Path src = srcStatus.getPath();
     dst = checkDest(src.getName(), dstFS, dst, overwrite);
     if (srcStatus.isDirectory()) {
@@ -433,9 +433,9 @@ public class FileUtil {
 
   /** Copy local files to a FileSystem. */
   public static boolean copy(File src,
-                             FileSystem dstFS, Path dst,
-                             boolean deleteSource,
-                             Configuration conf) throws IOException {
+      FileSystem dstFS, Path dst,
+      boolean deleteSource,
+      Configuration conf) throws IOException {
     dst = checkDest(src.getName(), dstFS, dst, false);
 
     if (src.isDirectory()) {
@@ -476,16 +476,16 @@ public class FileUtil {
 
   /** Copy FileSystem files to local files. */
   public static boolean copy(FileSystem srcFS, Path src,
-                             File dst, boolean deleteSource,
-                             Configuration conf) throws IOException {
+      File dst, boolean deleteSource,
+      Configuration conf) throws IOException {
     FileStatus filestatus = srcFS.getFileStatus(src);
     return copy(srcFS, filestatus, dst, deleteSource, conf);
   }
 
   /** Copy FileSystem files to local files. */
   private static boolean copy(FileSystem srcFS, FileStatus srcStatus,
-                              File dst, boolean deleteSource,
-                              Configuration conf) throws IOException {
+      File dst, boolean deleteSource,
+      Configuration conf) throws IOException {
     Path src = srcStatus.getPath();
     if (srcStatus.isDirectory()) {
       if (!dst.mkdirs()) {
@@ -509,7 +509,7 @@ public class FileUtil {
   }
 
   private static Path checkDest(String srcName, FileSystem dstFS, Path dst,
-                                boolean overwrite) throws IOException {
+      boolean overwrite) throws IOException {
     FileStatus sdst;
     try {
       sdst = dstFS.getFileStatus(dst);
@@ -519,6 +519,9 @@ public class FileUtil {
     if (null != sdst) {
       if (sdst.isDirectory()) {
         if (null == srcName) {
+          if (overwrite) {
+            return dst;
+          }
           throw new PathIsDirectoryException(dst.toString());
         }
         return checkDest(null, dstFS, new Path(dst, srcName), overwrite);
@@ -815,7 +818,7 @@ public class FileUtil {
    * @throws ExecutionException task submit failed
    */
   public static void unTar(InputStream inputStream, File untarDir,
-                           boolean gzipped)
+      boolean gzipped)
       throws IOException, InterruptedException, ExecutionException {
     if (!untarDir.mkdirs()) {
       if (!untarDir.isDirectory()) {
@@ -865,7 +868,7 @@ public class FileUtil {
   }
 
   private static void unTarUsingTar(InputStream inputStream, File untarDir,
-                                    boolean gzipped)
+      boolean gzipped)
       throws IOException, InterruptedException, ExecutionException {
     StringBuilder untarCommand = new StringBuilder();
     if (gzipped) {
@@ -883,7 +886,7 @@ public class FileUtil {
   }
 
   private static void unTarUsingTar(File inFile, File untarDir,
-                                    boolean gzipped) throws IOException {
+      boolean gzipped) throws IOException {
     StringBuffer untarCommand = new StringBuffer();
     if (gzipped) {
       untarCommand.append(" gzip -dc '")
@@ -911,7 +914,7 @@ public class FileUtil {
   }
 
   static void unTarUsingJava(File inFile, File untarDir,
-                             boolean gzipped) throws IOException {
+      boolean gzipped) throws IOException {
     InputStream inputStream = null;
     TarArchiveInputStream tis = null;
     try {
@@ -936,7 +939,7 @@ public class FileUtil {
   }
 
   private static void unTarUsingJava(InputStream inputStream, File untarDir,
-                                     boolean gzipped) throws IOException {
+      boolean gzipped) throws IOException {
     TarArchiveInputStream tis = null;
     try {
       if (gzipped) {
@@ -955,7 +958,7 @@ public class FileUtil {
   }
 
   private static void unpackEntries(TarArchiveInputStream tis,
-                                    TarArchiveEntry entry, File outputDir) throws IOException {
+      TarArchiveEntry entry, File outputDir) throws IOException {
     String targetDirPath = outputDir.getCanonicalPath() + File.separator;
     File outputFile = new File(outputDir, entry.getName());
     if (!outputFile.getCanonicalPath().startsWith(targetDirPath)) {
@@ -1124,7 +1127,7 @@ public class FileUtil {
    * @throws IOException exception on setOwner
    */
   public static void setOwner(File file, String username,
-                              String groupname) throws IOException {
+      String groupname) throws IOException {
     if (username == null && groupname == null) {
       throw new IOException("username == null && groupname == null");
     }
@@ -1309,7 +1312,7 @@ public class FileUtil {
   }
 
   private static void checkReturnValue(boolean rv, File p,
-                                       FsPermission permission
+      FsPermission permission
   ) throws IOException {
     if (!rv) {
       throw new IOException("Failed to set permissions of path: " + p +
@@ -1319,7 +1322,7 @@ public class FileUtil {
   }
 
   private static void execSetPermission(File f,
-                                        FsPermission permission
+      FsPermission permission
   )  throws IOException {
     if (NativeIO.isAvailable()) {
       NativeIO.POSIX.chmod(f.getCanonicalPath(), permission.toShort());
@@ -1348,8 +1351,8 @@ public class FileUtil {
    * @see java.io.File#deleteOnExit()
    */
   public static final File createLocalTempFile(final File basefile,
-                                               final String prefix,
-                                               final boolean isDeleteOnExit)
+      final String prefix,
+      final boolean isDeleteOnExit)
       throws IOException {
     File tmp = File.createTempFile(prefix + basefile.getName(),
         "", basefile.getParentFile());
@@ -1431,7 +1434,7 @@ public class FileUtil {
   }
 
   public static String[] createJarWithClassPath(String inputClassPath, Path pwd,
-                                                Map<String, String> callerEnv) throws IOException {
+      Map<String, String> callerEnv) throws IOException {
     return createJarWithClassPath(inputClassPath, pwd, pwd, callerEnv);
   }
 
@@ -1467,8 +1470,8 @@ public class FileUtil {
    * @throws IOException if there is an I/O error while writing the jar file
    */
   public static String[] createJarWithClassPath(String inputClassPath, Path pwd,
-                                                Path targetDir,
-                                                Map<String, String> callerEnv) throws IOException {
+      Path targetDir,
+      Map<String, String> callerEnv) throws IOException {
     // Replace environment variables, case-insensitive on Windows
     @SuppressWarnings("unchecked")
     Map<String, String> env = Shell.WINDOWS ? new CaseInsensitiveMap<>(callerEnv) :
@@ -1649,7 +1652,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileSystem write(final FileSystem fs, final Path path,
-                                 final byte[] bytes) throws IOException {
+      final byte[] bytes) throws IOException {
 
     Objects.requireNonNull(path);
     Objects.requireNonNull(bytes);
@@ -1676,7 +1679,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileContext write(final FileContext fileContext,
-                                  final Path path, final byte[] bytes) throws IOException {
+      final Path path, final byte[] bytes) throws IOException {
 
     Objects.requireNonNull(path);
     Objects.requireNonNull(bytes);
@@ -1708,7 +1711,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileSystem write(final FileSystem fs, final Path path,
-                                 final Iterable<? extends CharSequence> lines, final Charset cs)
+      final Iterable<? extends CharSequence> lines, final Charset cs)
       throws IOException {
 
     Objects.requireNonNull(path);
@@ -1746,8 +1749,8 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileContext write(final FileContext fileContext,
-                                  final Path path, final Iterable<? extends CharSequence> lines,
-                                  final Charset cs) throws IOException {
+      final Path path, final Iterable<? extends CharSequence> lines,
+      final Charset cs) throws IOException {
 
     Objects.requireNonNull(path);
     Objects.requireNonNull(lines);
@@ -1781,7 +1784,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileSystem write(final FileSystem fs, final Path path,
-                                 final CharSequence charseq, final Charset cs) throws IOException {
+      final CharSequence charseq, final Charset cs) throws IOException {
 
     Objects.requireNonNull(path);
     Objects.requireNonNull(charseq);
@@ -1812,7 +1815,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileContext write(final FileContext fs, final Path path,
-                                  final CharSequence charseq, final Charset cs) throws IOException {
+      final CharSequence charseq, final Charset cs) throws IOException {
 
     Objects.requireNonNull(path);
     Objects.requireNonNull(charseq);
@@ -1842,7 +1845,7 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileSystem write(final FileSystem fs, final Path path,
-                                 final CharSequence charseq) throws IOException {
+      final CharSequence charseq) throws IOException {
     return write(fs, path, charseq, StandardCharsets.UTF_8);
   }
 
@@ -1861,7 +1864,23 @@ public class FileUtil {
    * @throws IOException if an I/O error occurs creating or writing to the file
    */
   public static FileContext write(final FileContext fileContext,
-                                  final Path path, final CharSequence charseq) throws IOException {
+      final Path path, final CharSequence charseq) throws IOException {
     return write(fileContext, path, charseq, StandardCharsets.UTF_8);
   }
+
+  @InterfaceAudience.LimitedPrivate({"ViewDistributedFileSystem"})
+  @InterfaceStability.Unstable
+  /**
+   * Used in ViewDistributedFileSystem rename API to get access to the protected
+   * API of FileSystem interface. Even though Rename with options API
+   * deprecated, we are still using as part of trash. If any filesystem provided
+   * implementation to this protected FileSystem API, we can't invoke it with
+   * out casting to the specific filesystem. This util method is proposed to get
+   * the access to FileSystem#rename with options.
+   */
+  @SuppressWarnings("deprecation")
+  public static void rename(FileSystem srcFs, Path src, Path dst,
+      final Options.Rename... options) throws IOException {
+    srcFs.rename(src, dst, options);
+  }
 }
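
Beyond the argument-indentation reflow, the functional changes in this forked
FileUtil are the overwrite-aware early return in checkDest() and the new
rename(FileSystem, Path, Path, Options.Rename...) utility, both tracking the
upstream Hadoop 3.3.2 sources. A quick usage sketch of the copy(...) overload
touched above; the configuration and paths are hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.fs.Path;

    public class CopySketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // deleteSource=false, overwrite=true; with the checkDest() change in
        // the hunk above, a destination path that resolves to an existing
        // directory no longer raises PathIsDirectoryException when overwrite
        // is requested.
        FileUtil.copy(fs, new Path("/tmp/src.txt"),
            fs, new Path("/tmp/dst"),
            false, true, conf);
      }
    }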
diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/HardLink.java b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/HardLink.java
index e043703..2532d28 100644
--- a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/HardLink.java
+++ b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/HardLink.java
@@ -79,7 +79,7 @@ public class HardLink {
    * @param linkDir - where the hardlinks should be put. It must already exist.
    */
   public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
-                                        File linkDir) throws IOException {
+      File linkDir) throws IOException {
     if (parentDir == null) {
       throw new IOException(
           "invalid arguments to createHardLinkMult: parent directory is null");
@@ -119,7 +119,7 @@ public class HardLink {
 
   /* Create an IOException for failing to get link count. */
   private static IOException createIOException(File f, String message,
-                                               String error, int exitvalue, Exception cause) {
+      String error, int exitvalue, Exception cause) {
 
     final String s = "Failed to get link count on file " + f
         + ": message=" + message
diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
index c059ef8..8b971de 100644
--- a/solr/modules/hdfs/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/solr/modules/hdfs/src/test/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 import java.io.BufferedOutputStream;
 import java.io.DataOutput;
@@ -309,7 +309,7 @@ public class RawLocalFileSystem extends FileSystem {
         .build();
 
     private LocalFSFileOutputStream(Path f, boolean append,
-                                    FsPermission permission) throws IOException {
+        FsPermission permission) throws IOException {
       File file = pathToFile(f);
       if (!append && permission == null) {
         permission = FsPermission.getFileDefault();
@@ -400,7 +400,7 @@ public class RawLocalFileSystem extends FileSystem {
 
   @Override
   public FSDataOutputStream append(Path f, int bufferSize,
-                                   Progressable progress) throws IOException {
+      Progressable progress) throws IOException {
     FileStatus status = getFileStatus(f);
     if (status.isDirectory()) {
       throw new IOException("Cannot append to a diretory (=" + f + " )");
@@ -412,15 +412,15 @@ public class RawLocalFileSystem extends FileSystem {
 
   @Override
   public FSDataOutputStream create(Path f, boolean overwrite, int bufferSize,
-                                   short replication, long blockSize, Progressable progress)
+      short replication, long blockSize, Progressable progress)
       throws IOException {
     return create(f, overwrite, true, bufferSize, replication, blockSize,
         progress, null);
   }
 
   private FSDataOutputStream create(Path f, boolean overwrite,
-                                    boolean createParent, int bufferSize, short replication, long blockSize,
-                                    Progressable progress, FsPermission permission) throws IOException {
+      boolean createParent, int bufferSize, short replication, long blockSize,
+      Progressable progress, FsPermission permission) throws IOException {
     if (exists(f) && !overwrite) {
       throw new FileAlreadyExistsException("File already exists: " + f);
     }
@@ -439,14 +439,14 @@ public class RawLocalFileSystem extends FileSystem {
   }
 
   protected OutputStream createOutputStreamWithMode(Path f, boolean append,
-                                                    FsPermission permission) throws IOException {
+      FsPermission permission) throws IOException {
     return new LocalFSFileOutputStream(f, append, permission);
   }
 
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
-                                               EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
-                                               Progressable progress) throws IOException {
+      EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
     if (exists(f) && !flags.contains(CreateFlag.OVERWRITE)) {
       throw new FileAlreadyExistsException("File already exists: " + f);
     }
@@ -457,8 +457,8 @@ public class RawLocalFileSystem extends FileSystem {
 
   @Override
   public FSDataOutputStream create(Path f, FsPermission permission,
-                                   boolean overwrite, int bufferSize, short replication, long blockSize,
-                                   Progressable progress) throws IOException {
+      boolean overwrite, int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
 
     FSDataOutputStream out = create(f, overwrite, true, bufferSize, replication,
         blockSize, progress, permission);
@@ -467,9 +467,9 @@ public class RawLocalFileSystem extends FileSystem {
 
   @Override
   public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
-                                               boolean overwrite,
-                                               int bufferSize, short replication, long blockSize,
-                                               Progressable progress) throws IOException {
+      boolean overwrite,
+      int bufferSize, short replication, long blockSize,
+      Progressable progress) throws IOException {
     FSDataOutputStream out = create(f, overwrite, false, bufferSize, replication,
         blockSize, progress, permission);
     return out;
@@ -511,7 +511,7 @@ public class RawLocalFileSystem extends FileSystem {
 
   @VisibleForTesting
   public final boolean handleEmptyDstDirectoryOnWindows(Path src, File srcFile,
-                                                        Path dst, File dstFile) throws IOException {
+      Path dst, File dstFile) throws IOException {
 
     // Enforce POSIX rename behavior that a source directory replaces an
     // existing destination if the destination is an empty directory. On most
@@ -992,7 +992,7 @@ public class RawLocalFileSystem extends FileSystem {
    *            {@link PathHandle} reference.
    */
   protected PathHandle createPathHandle(FileStatus stat,
-                                        Options.HandleOpt... opts) {
+      Options.HandleOpt... opts) {
     if (stat.isDirectory() || stat.isSymlink()) {
       throw new IllegalArgumentException("PathHandle only available for files");
     }
@@ -1077,7 +1077,7 @@ public class RawLocalFileSystem extends FileSystem {
    * @throws IOException Exception on getFileLinkStatusInternal
    */
   private FileStatus getFileLinkStatusInternal(final Path f,
-                                               boolean dereference) throws IOException {
+      boolean dereference) throws IOException {
     if (!useDeprecatedFileStatus) {
       return getNativeFileLinkStatus(f, dereference);
     } else if (dereference) {
@@ -1144,7 +1144,7 @@ public class RawLocalFileSystem extends FileSystem {
    * @throws IOException Exception on getNativeFileLinkStatus
    */
   private FileStatus getNativeFileLinkStatus(final Path f,
-                                             boolean dereference) throws IOException {
+      boolean dereference) throws IOException {
     checkPath(f);
     Stat stat = new Stat(f, defaultBlockSize, dereference, this);
     FileStatus status = stat.getFileStatus();
diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/modules/hdfs/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index 6f2d142..32aa3c6 100644
--- a/solr/modules/hdfs/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/solr/modules/hdfs/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -71,7 +71,7 @@ import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.Timer;
 import org.apache.solr.hdfs.cloud.HdfsTestUtil;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 
 /**
  * A block pool slice represents a portion of a block pool stored on a volume.
@@ -131,7 +131,7 @@ public class BlockPoolSlice {
    * @throws IOException Error making directories
    */
   BlockPoolSlice(String bpid, FsVolumeImpl volume, File bpDir,
-                 Configuration conf, Timer timer) throws IOException {
+      Configuration conf, Timer timer) throws IOException {
     this.bpid = bpid;
     this.volume = volume;
     this.fileIoProvider = volume.getFileIoProvider();
@@ -211,7 +211,7 @@ public class BlockPoolSlice {
   }
 
   private synchronized static void initializeAddReplicaPool(Configuration conf,
-                                                            FsDatasetImpl dataset) {
+      FsDatasetImpl dataset) {
     if (addReplicaThreadPool == null) {
       int numberOfBlockPoolSlice = dataset.getVolumeCount()
           * dataset.getBPServiceCount();
@@ -299,7 +299,7 @@ public class BlockPoolSlice {
    * under finalized.
    */
   ReplicaInfo activateSavedReplica(ReplicaInfo replicaInfo,
-                                   RamDiskReplica replicaState) throws IOException {
+      RamDiskReplica replicaState) throws IOException {
     File metaFile = replicaState.getSavedMetaFile();
     File blockFile = replicaState.getSavedBlockFile();
     final long blockId = replicaInfo.getBlockId();
@@ -328,8 +328,10 @@ public class BlockPoolSlice {
     DiskChecker.checkDir(rbwDir);
   }
 
+
+
   void getVolumeMap(ReplicaMap volumeMap,
-                    final RamDiskReplicaTracker lazyWriteReplicaMap)
+      final RamDiskReplicaTracker lazyWriteReplicaMap)
       throws IOException {
     // Recover lazy persist replicas, they will be added to the volumeMap
     // when we scan the finalized directory.
@@ -381,7 +383,7 @@ public class BlockPoolSlice {
    *           throw if any sub task or multiple sub tasks failed.
    */
   private void waitForSubTaskToFinish(Queue<RecursiveAction> subTaskQueue,
-                                      List<IOException> exceptions) throws IOException {
+      List<IOException> exceptions) throws IOException {
     while (!subTaskQueue.isEmpty()) {
       RecursiveAction task = subTaskQueue.poll();
       if (task != null) {
@@ -475,7 +477,7 @@ public class BlockPoolSlice {
   }
 
   private void addReplicaToReplicasMap(Block block, ReplicaMap volumeMap,
-                                       final RamDiskReplicaTracker lazyWriteReplicaMap,boolean isFinalized)
+      final RamDiskReplicaTracker lazyWriteReplicaMap,boolean isFinalized)
       throws IOException {
     ReplicaInfo newReplica = null;
     long blockId = block.getBlockId();
@@ -572,8 +574,8 @@ public class BlockPoolSlice {
    * @param subTaskQueue queue of sub tasks
    */
   void addToReplicasMap(ReplicaMap volumeMap, File dir,
-                        final RamDiskReplicaTracker lazyWriteReplicaMap, boolean isFinalized,
-                        List<IOException> exceptions, Queue<RecursiveAction> subTaskQueue)
+      final RamDiskReplicaTracker lazyWriteReplicaMap, boolean isFinalized,
+      List<IOException> exceptions, Queue<RecursiveAction> subTaskQueue)
       throws IOException {
     File[] files = fileIoProvider.listFiles(volume, dir);
     Arrays.sort(files, FILE_COMPARATOR);
@@ -650,7 +652,7 @@ public class BlockPoolSlice {
 
   @VisibleForTesting
   static ReplicaInfo selectReplicaToDelete(final ReplicaInfo replica1,
-                                           final ReplicaInfo replica2) {
+      final ReplicaInfo replica2) {
     ReplicaInfo replicaToKeep;
     ReplicaInfo replicaToDelete;
 
@@ -783,7 +785,7 @@ public class BlockPoolSlice {
   }
 
   private boolean readReplicasFromCache(ReplicaMap volumeMap,
-                                        final RamDiskReplicaTracker lazyWriteReplicaMap) {
+      final RamDiskReplicaTracker lazyWriteReplicaMap) {
     ReplicaMap tmpReplicaMap = new ReplicaMap(new ReentrantReadWriteLock());
     File replicaFile = new File(replicaCacheDir, REPLICA_CACHE_FILE);
     // Check whether the file exists or not.
@@ -935,8 +937,8 @@ public class BlockPoolSlice {
      *          queue of sub tasks
      */
     AddReplicaProcessor(ReplicaMap volumeMap, File dir,
-                        RamDiskReplicaTracker lazyWriteReplicaMap, boolean isFinalized,
-                        List<IOException> exceptions, Queue<RecursiveAction> subTaskQueue) {
+        RamDiskReplicaTracker lazyWriteReplicaMap, boolean isFinalized,
+        List<IOException> exceptions, Queue<RecursiveAction> subTaskQueue) {
       this.volumeMap = volumeMap;
       this.dir = dir;
       this.lazyWriteReplicaMap = lazyWriteReplicaMap;
diff --git a/solr/modules/hdfs/src/test/org/apache/hadoop/util/DiskChecker.java b/solr/modules/hdfs/src/test/org/apache/hadoop/util/DiskChecker.java
index 98127b0..fab293b 100644
--- a/solr/modules/hdfs/src/test/org/apache/hadoop/util/DiskChecker.java
+++ b/solr/modules/hdfs/src/test/org/apache/hadoop/util/DiskChecker.java
@@ -25,7 +25,7 @@ import java.io.IOException;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicReference;
 
-import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
 public class DiskChecker {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
-  public static final Logger log = LoggerFactory.getLogger(DiskChecker.class); // nowarn_valid_logger
+  public static final Logger LOG = LoggerFactory.getLogger(DiskChecker.class); // nowarn_valid_logger
 
   public static class DiskErrorException extends IOException {
     public DiskErrorException(String msg) {
@@ -113,7 +113,7 @@ public class DiskChecker {
    * @throws IOException exception checking dir
    */
   public static void checkDir(LocalFileSystem localFS, Path dir,
-                              FsPermission expected)
+      FsPermission expected)
       throws DiskErrorException, IOException {
     checkDirInternal(localFS, dir, expected);
   }
@@ -131,14 +131,14 @@ public class DiskChecker {
    * @throws IOException exception checking dir
    */
   public static void checkDirWithDiskIo(LocalFileSystem localFS, Path dir,
-                                        FsPermission expected)
+      FsPermission expected)
       throws DiskErrorException, IOException {
     checkDirInternal(localFS, dir, expected);
     doDiskIo(localFS.pathToFile(dir));
   }
 
   private static void checkDirInternal(LocalFileSystem localFS, Path dir,
-                                       FsPermission expected)
+      FsPermission expected)
       throws DiskErrorException, IOException {
     mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
     checkAccessByFileMethods(localFS.pathToFile(dir));
@@ -293,7 +293,7 @@ public class DiskChecker {
       }
       file = null;
     } finally {
-      IOUtils.cleanupWithLogger(log, fos);
+      IOUtils.cleanupWithLogger(LOG, fos);
       FileUtils.deleteQuietly(file);
     }
   }
diff --git a/solr/server/etc/security.policy b/solr/server/etc/security.policy
index 18e56df..b0a07d8 100644
--- a/solr/server/etc/security.policy
+++ b/solr/server/etc/security.policy
@@ -113,9 +113,6 @@ grant {
   permission java.lang.management.ManagementPermission "control";
   permission java.lang.management.ManagementPermission "monitor";
 
-  // needed by hadoop htrace
-  permission java.net.NetPermission "getNetworkInformation";
-
   // needed by DIH - possibly even after DIH is a package
   permission java.sql.SQLPermission "deregisterDriver";
 
diff --git a/versions.lock b/versions.lock
index 3efc1fe..b81b67c 100644
--- a/versions.lock
+++ b/versions.lock
@@ -106,7 +106,7 @@ jakarta.xml.bind:jakarta.xml.bind-api:2.3.3 (3 constraints: 882a5cbd)
 javax.measure:unit-api:1.0 (5 constraints: 8e3e2cc5)
 javax.servlet:javax.servlet-api:3.1.0 (3 constraints: 75209943)
 joda-time:joda-time:2.9.9 (4 constraints: eb1fe06a)
-junit:junit:4.13.1 (7 constraints: af6cf716)
+junit:junit:4.13.2 (7 constraints: b26cac18)
 net.arnx:jsonic:1.2.7 (2 constraints: db10d4d1)
 net.java.dev.jna:jna:5.10.0 (1 constraints: 8d0c870e)
 net.jcip:jcip-annotations:1.0 (6 constraints: 6130731b)
@@ -129,13 +129,12 @@ org.apache.commons:commons-text:1.8 (2 constraints: fe1528d0)
 org.apache.curator:curator-client:4.3.0 (2 constraints: e214cba2)
 org.apache.curator:curator-framework:4.3.0 (2 constraints: ff13b474)
 org.apache.curator:curator-recipes:4.3.0 (1 constraints: 09050836)
-org.apache.hadoop:hadoop-annotations:3.3.1 (1 constraints: 09050436)
-org.apache.hadoop:hadoop-auth:3.3.1 (1 constraints: 09050436)
-org.apache.hadoop:hadoop-client-api:3.3.1 (3 constraints: 1628155e)
-org.apache.hadoop:hadoop-client-runtime:3.3.1 (2 constraints: 6517ce42)
-org.apache.hadoop:hadoop-common:3.3.1 (1 constraints: 09050436)
+org.apache.hadoop:hadoop-annotations:3.3.2 (1 constraints: 0a050536)
+org.apache.hadoop:hadoop-auth:3.3.2 (1 constraints: 0a050536)
+org.apache.hadoop:hadoop-client-api:3.3.2 (3 constraints: 1928ac5e)
+org.apache.hadoop:hadoop-client-runtime:3.3.2 (2 constraints: 67170443)
+org.apache.hadoop:hadoop-common:3.3.2 (1 constraints: 0a050536)
 org.apache.hadoop.thirdparty:hadoop-shaded-guava:1.1.1 (1 constraints: 0505f435)
-org.apache.htrace:htrace-core4:4.1.0-incubating (2 constraints: 581ec435)
 org.apache.httpcomponents:httpclient:4.5.13 (10 constraints: d1907cfc)
 org.apache.httpcomponents:httpcore:4.4.15 (9 constraints: b87d9f84)
 org.apache.httpcomponents:httpmime:4.5.13 (3 constraints: ea1bb9dc)
@@ -268,7 +267,7 @@ org.tallison:metadata-extractor:2.15.0.1 (1 constraints: ee0c2f28)
 org.tallison.xmp:xmpcore-shaded:6.1.11 (1 constraints: 310e8e49)
 org.threeten:threetenbp:1.5.2 (4 constraints: 0433f25e)
 org.tukaani:xz:1.9 (1 constraints: 030c5be9)
-org.xerial.snappy:snappy-java:1.1.7.6 (1 constraints: 6f05a240)
+org.xerial.snappy:snappy-java:1.1.8.2 (4 constraints: 8c382ee2)
 software.amazon.awssdk:annotations:2.17.63 (17 constraints: e2ff18d3)
 software.amazon.awssdk:apache-client:2.17.63 (3 constraints: 951d4b90)
 software.amazon.awssdk:arns:2.17.63 (2 constraints: 21185ac1)
@@ -326,9 +325,9 @@ net.bytebuddy:byte-buddy:1.10.20 (2 constraints: 7c10a9d0)
 net.minidev:accessors-smart:2.4.7 (1 constraints: 4e0a90b8)
 net.minidev:json-smart:2.4.7 (1 constraints: 160e936e)
 no.nav.security:mock-oauth2-server:0.4.3 (1 constraints: 0905fa35)
-org.apache.hadoop:hadoop-client-minicluster:3.3.1 (1 constraints: 09050436)
-org.apache.hadoop:hadoop-hdfs:3.3.1 (1 constraints: 09050436)
-org.apache.hadoop:hadoop-minikdc:3.3.1 (1 constraints: 09050436)
+org.apache.hadoop:hadoop-client-minicluster:3.3.2 (1 constraints: 0a050536)
+org.apache.hadoop:hadoop-hdfs:3.3.2 (1 constraints: 0a050536)
+org.apache.hadoop:hadoop-minikdc:3.3.2 (1 constraints: 0a050536)
 org.apache.kerby:kerb-admin:1.0.1 (1 constraints: 840d892f)
 org.apache.kerby:kerb-client:1.0.1 (1 constraints: 840d892f)
 org.apache.kerby:kerb-common:1.0.1 (2 constraints: a51841ca)
diff --git a/versions.props b/versions.props
index 6a3fb96..d73e19a 100644
--- a/versions.props
+++ b/versions.props
@@ -25,7 +25,7 @@ io.opentracing:*=0.33.0
 io.prometheus:*=0.2.0
 javax.servlet:javax.servlet-api=3.1.0
 joda-time:joda-time=2.9.9
-junit:junit=4.13.1
+junit:junit=4.13.2
 net.arnx:jsonic=1.2.7
 net.bytebuddy:byte-buddy=1.9.3
 net.sourceforge.argparse4j:argparse4j=0.8.1
@@ -42,8 +42,7 @@ org.apache.commons:commons-math3=3.6.1
 org.apache.commons:commons-text=1.8
 org.apache.curator:*=4.3.0
 org.apache.hadoop.thirdparty:*=1.1.1
-org.apache.hadoop:*=3.3.1
-org.apache.htrace:htrace-core4=4.1.0-incubating
+org.apache.hadoop:*=3.3.2
 org.apache.httpcomponents:httpclient=4.5.13
 org.apache.httpcomponents:httpcore=4.4.15
 org.apache.httpcomponents:httpmime=4.5.13
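
For context on the two files above: the "(N constraints: hash)" entries in
versions.lock are the format written by the Palantir gradle-consistent-versions
plugin, where versions.props supplies the version pins and the module
build.gradle files declare dependencies without versions (as in the hadoop-auth
and hdfs snippets earlier in this diff). A minimal sketch of how the wildcard
pin flows through (the module path is hypothetical):

    // solr/modules/<some-module>/build.gradle (sketch)
    // No versions here; org.apache.hadoop:*=3.3.2 in versions.props pins them.
    dependencies {
      implementation 'org.apache.hadoop:hadoop-client-api'   // resolves to 3.3.2
      testImplementation 'org.apache.hadoop:hadoop-minikdc'  // resolves to 3.3.2
    }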