Posted to commits@lucene.apache.org by ho...@apache.org on 2021/08/31 17:45:32 UTC

[lucene-solr] branch branch_8x updated: SOLR-15599: Upgrade AWS SDK from v1 to v2 (#2563)

This is an automated email from the ASF dual-hosted git repository.

houston pushed a commit to branch branch_8x
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git


The following commit(s) were added to refs/heads/branch_8x by this push:
     new b83f0e7  SOLR-15599: Upgrade AWS SDK from v1 to v2 (#2563)
b83f0e7 is described below

commit b83f0e7344c11e78108ffcb787a1f8d427ebdcdd
Author: Houston Putman <ho...@apache.org>
AuthorDate: Tue Aug 31 13:45:11 2021 -0400

    SOLR-15599: Upgrade AWS SDK from v1 to v2 (#2563)
    
    Also removed the woodstox-core-asl dependency and replaced it with com.fasterxml.woodstox:woodstox-core:6.2.4, the newer version of the same library.
---
 lucene/ivy-versions.properties                     |   9 +-
 solr/CHANGES.txt                                   |   4 +
 solr/contrib/s3-repository/README.md               |   8 +-
 solr/contrib/s3-repository/ivy.xml                 |  13 +-
 .../org/apache/solr/s3/S3BackupRepository.java     |  10 +-
 .../apache/solr/s3/S3BackupRepositoryConfig.java   |  20 +-
 .../java/org/apache/solr/s3/S3OutputStream.java    | 102 +++---
 .../java/org/apache/solr/s3/S3StorageClient.java   | 359 ++++++++++----------
 .../org/apache/solr/s3/AbstractS3ClientTest.java   |   9 +-
 .../org/apache/solr/s3/S3BackupRepositoryTest.java |  35 +-
 .../apache/solr/s3/S3IncrementalBackupTest.java    |   7 +-
 .../test/org/apache/solr/s3/S3IndexInputTest.java  |  68 ++--
 .../org/apache/solr/s3/S3OutputStreamTest.java     | 108 +++---
 .../src/test/org/apache/solr/s3/S3PathsTest.java   |   8 +-
 .../src/java/org/apache/solr/core/NodeConfig.java  |   3 +-
 solr/licenses/jaxb-api-2.3.1.jar.sha1              |   1 -
 solr/licenses/jaxb-api-LICENSE-CDDL.txt            | 362 ---------------------
 solr/licenses/jaxb-api-NOTICE.txt                  |   1 -
 solr/licenses/netty-nio-client-2.16.93.jar.sha1    |   1 -
 solr/licenses/netty-nio-client-LICENSE-ASL.txt     | 206 ------------
 solr/licenses/netty-nio-client-NOTICE.txt          |  25 --
 .../licenses/netty-reactive-streams-2.0.5.jar.sha1 |   1 -
 .../netty-reactive-streams-LICENSE-ASL.txt         | 202 ------------
 solr/licenses/netty-reactive-streams-NOTICE.txt    |   1 -
 .../netty-reactive-streams-http-2.0.5.jar.sha1     |   1 -
 .../netty-reactive-streams-http-LICENSE-ASL.txt    | 202 ------------
 .../netty-reactive-streams-http-NOTICE.txt         |   1 -
 solr/licenses/reactive-streams-1.0.3.jar.sha1      |   1 +
 solr/licenses/reactive-streams-LICENSE-PD.txt      |   8 +
 solr/licenses/reactive-streams-NOTICE.txt          |   0
 solr/licenses/woodstox-core-asl-4.4.1.jar.sha1     |   1 -
 solr/server/etc/security.policy                    |   8 +-
 .../src/making-and-restoring-backups.adoc          |  31 +-
 solr/solrj/ivy.xml                                 |   2 +-
 34 files changed, 417 insertions(+), 1401 deletions(-)

diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 682bf9f..21a9053 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -87,10 +87,6 @@ com.sun.jersey.version = 1.19
 
 /com.tdunning/t-digest = 3.1
 
-com.typesafe.netty.version = 2.0.5
-/com.typesafe.netty/netty-reactive-streams = ${com.typesafe.netty.version}
-/com.typesafe.netty/netty-reactive-streams-http = ${com.typesafe.netty.version}
-
 /com.vaadin.external.google/android-json = 0.0.20131108.vaadin1
 /com.zaxxer/SparseBitSet = 1.2
 /commons-cli/commons-cli = 1.4
@@ -152,7 +148,6 @@ io.prometheus.version = 0.2.0
 
 /javax.activation/activation = 1.1.1
 /javax.servlet/javax.servlet-api = 3.1.0
-/javax.xml.bind/jaxb-api = 2.3.1
 
 /joda-time/joda-time = 2.9.9
 /junit/junit = 4.13.1
@@ -314,7 +309,6 @@ org.codehaus.janino.version = 3.0.9
 /org.codehaus.janino/janino = ${org.codehaus.janino.version}
 
 /org.codehaus.woodstox/stax2-api = 4.2.1
-/org.codehaus.woodstox/woodstox-core-asl = 4.4.1
 
 org.eclipse.jetty.version = 9.4.41.v20210516
 /org.eclipse.jetty.http2/http2-client = ${org.eclipse.jetty.version}
@@ -370,6 +364,8 @@ org.ow2.asm.version = 8.0.1
 /org.ow2.asm/asm = ${org.ow2.asm.version}
 /org.ow2.asm/asm-commons = ${org.ow2.asm.version}
 
+/org.reactivestreams/reactive-streams = 1.0.3
+
 /org.rrd4j/rrd4j = 3.5
 
 org.slf4j.version = 1.7.24
@@ -424,7 +420,6 @@ software.amazon.awssdk.version = 2.16.93
 /software.amazon.awssdk/aws-xml-protocol = ${software.amazon.awssdk.version}
 /software.amazon.awssdk/http-client-spi = ${software.amazon.awssdk.version}
 /software.amazon.awssdk/metrics-spi = ${software.amazon.awssdk.version}
-/software.amazon.awssdk/netty-nio-client = ${software.amazon.awssdk.version}
 /software.amazon.awssdk/profiles = ${software.amazon.awssdk.version}
 /software.amazon.awssdk/protocol-core = ${software.amazon.awssdk.version}
 /software.amazon.awssdk/regions = ${software.amazon.awssdk.version}
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 03dc95e..2fed1ca 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -51,6 +51,8 @@ Improvements
 
 * SOLR-9853: Project multi-valued fields in SQL query results (Timothy Potter)
 
+* SOLR-15599: Upgrade AWS SDK from v1 to v2 for S3 Repository (Houston Putman)
+
 Optimizations
 ---------------------
 * SOLR-15433: Replace transient core cache LRU by Caffeine cache. (Bruno Roustant)
@@ -96,6 +98,8 @@ Other Changes
 * SOLR-15486: During node shutdown pausing of updates and waiting for in-flight update requests to finish
   before closing cores is no longer SolrCloud specific. (Christine Poerschke, David Smiley)
 
+* SOLR-15599: woodstox-core-asl:4.4.1 (org.codehaus) replaced with woodstox-core:6.2.4 (com.fasterxml) (Houston Putman)
+
 ==================  8.9.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/contrib/s3-repository/README.md b/solr/contrib/s3-repository/README.md
index 977d389..762d62e 100644
--- a/solr/contrib/s3-repository/README.md
+++ b/solr/contrib/s3-repository/README.md
@@ -7,14 +7,16 @@ This S3 repository is a backup repository implementation designed to provide bac
 
 Add this to your `solr.xml`:
 
+```xml
     <backup>
         <repository name="s3" class="org.apache.solr.s3.S3BackupRepository" default="false">
             <str name="s3.bucket.name">BUCKET_NAME</str>
             <str name="s3.region">us-west-2</str>
         </repository>
     </backup>
+```
 
-This plugin uses the [default AWS credentials provider chain](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html), so ensure that your credentials are set appropriately (e.g., via env var, or in `~/.aws/credentials`, etc.).
+This plugin uses the [default AWS credentials provider chain](https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/credentials.html), so ensure that your credentials are set appropriately (e.g., via env var, or in `~/.aws/credentials`, etc.).
 
 ## Testing locally
 
@@ -25,6 +27,7 @@ To run / test locally, first spin up S3Mock:
 
 Add this to your `solr.xml`:
 
+```xml
     <backup>
         <repository name="s3" class="org.apache.solr.s3.S3BackupRepository" default="false">
             <str name="s3.endpoint">http://localhost:9090</str>
@@ -32,6 +35,7 @@ Add this to your `solr.xml`:
             <str name="s3.region">us-east-1</str>
         </repository>
     </backup>
+```
 
 Start Solr, and create a collection (e.g., "foo"). Then hit the following URL, which will take a backup and persist it in S3Mock under the name `test`:
 
@@ -45,6 +49,7 @@ http://localhost:8983/solr/admin/collections?action=RESTORE&repository=s3&locati
 
 If you are also running Solr in a docker image, and need to set the endpoint of S3Mock to be different than `localhost`, then add the following under `<repository>`:
 
+```xml
     <backup>
         <repository name="s3" class="org.apache.solr.s3.S3BackupRepository" default="false">
             <str name="s3.bucket.name">TEST_BUCKET</str>
@@ -52,6 +57,7 @@ If you are also running Solr in a docker image, and need to set the endpoint of
             <str name="s3.region">us-east-1</str>
         </repository>
     </backup>
+```
 
 This works for the regular S3 backup repository as well (not mock).
 But the plugin only provides official support for AWS S3, not _S3 compatible_ products.
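
As the README change above notes, the plugin relies on the SDK v2 default credentials provider chain. A minimal standalone sketch (not part of this patch; the class name is illustrative) of verifying that the chain can resolve credentials before starting Solr:

```java
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;

public class CredentialsCheck {
  public static void main(String[] args) {
    // Resolves from env vars, system properties, ~/.aws/credentials, etc.,
    // in the documented order of the default chain.
    try (DefaultCredentialsProvider provider = DefaultCredentialsProvider.create()) {
      System.out.println("Resolved access key id: "
          + provider.resolveCredentials().accessKeyId());
    }
  }
}
```
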
diff --git a/solr/contrib/s3-repository/ivy.xml b/solr/contrib/s3-repository/ivy.xml
index aed91ab..013a93c 100644
--- a/solr/contrib/s3-repository/ivy.xml
+++ b/solr/contrib/s3-repository/ivy.xml
@@ -23,10 +23,10 @@
     <conf name="test" transitive="false"/>
   </configurations>
   <dependencies>
-    <dependency org="com.amazonaws" name="aws-java-sdk-core" rev="${/com.amazonaws/aws-java-sdk-core}" conf="compile"/>
-    <dependency org="com.amazonaws" name="aws-java-sdk-s3" rev="${/com.amazonaws/aws-java-sdk-s3}" conf="compile"/>
-    <dependency org="javax.xml.bind" name="jaxb-api" rev="${/javax.xml.bind/jaxb-api}" conf="compile"/>
     <dependency org="joda-time" name="joda-time" rev="${/joda-time/joda-time}" conf="compile"/>
+    <dependency org="com.fasterxml.woodstox" name="woodstox-core" rev="${/com.fasterxml.woodstox/woodstox-core}" conf="compile"/>
+    <dependency org="org.codehaus.woodstox" name="stax2-api" rev="${/org.codehaus.woodstox/stax2-api}" conf="compile"/>
+    <dependency org="org.reactivestreams" name="reactive-streams" rev="${/org.reactivestreams/reactive-streams}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="apache-client" rev="${/software.amazon.awssdk/apache-client}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="arns" rev="${/software.amazon.awssdk/arns}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="auth" rev="${/software.amazon.awssdk/auth}" conf="compile"/>
@@ -35,27 +35,25 @@
     <dependency org="software.amazon.awssdk" name="aws-xml-protocol" rev="${/software.amazon.awssdk/aws-xml-protocol}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="http-client-spi" rev="${/software.amazon.awssdk/http-client-spi}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="metrics-spi" rev="${/software.amazon.awssdk/metrics-spi}" conf="compile"/>
-    <dependency org="software.amazon.awssdk" name="netty-nio-client" rev="${/software.amazon.awssdk/netty-nio-client}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="profiles" rev="${/software.amazon.awssdk/profiles}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="protocol-core" rev="${/software.amazon.awssdk/protocol-core}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="regions" rev="${/software.amazon.awssdk/regions}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="s3" rev="${/software.amazon.awssdk/s3}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="sdk-core" rev="${/software.amazon.awssdk/sdk-core}" conf="compile"/>
-    <dependency org="software.amazon.awssdk" name="url-connection-client" rev="${/software.amazon.awssdk/url-connection-client}" conf="compile"/>
     <dependency org="software.amazon.awssdk" name="utils" rev="${/software.amazon.awssdk/utils}" conf="compile"/>
     <dependency org="software.amazon.ion" name="ion-java" rev="${/software.amazon.ion/ion-java}" conf="compile"/>
 
     <dependency org="com.adobe.testing" name="s3mock" rev="${/com.adobe.testing/s3mock}" conf="test"/>
     <dependency org="com.adobe.testing" name="s3mock-junit4" rev="${/com.adobe.testing/s3mock-junit4}" conf="test"/>
     <dependency org="com.adobe.testing" name="s3mock-testsupport-common" rev="${/com.adobe.testing/s3mock-testsupport-common}" conf="test"/>
+    <dependency org="com.amazonaws" name="aws-java-sdk-core" rev="${/com.amazonaws/aws-java-sdk-core}" conf="test"/>
+    <dependency org="com.amazonaws" name="aws-java-sdk-s3" rev="${/com.amazonaws/aws-java-sdk-s3}" conf="test"/>
     <dependency org="com.fasterxml.jackson.dataformat" name="jackson-dataformat-xml" rev="${/com.fasterxml.jackson.dataformat/jackson-dataformat-xml}" conf="test"/>
     <dependency org="com.fasterxml.jackson.datatype" name="jackson-datatype-jdk8" rev="${/com.fasterxml.jackson.datatype/jackson-datatype-jdk8}" conf="test"/>
     <dependency org="com.fasterxml.jackson.datatype" name="jackson-datatype-jsr310" rev="${/com.fasterxml.jackson.datatype/jackson-datatype-jsr310}" conf="test"/>
     <dependency org="com.fasterxml.jackson.module" name="jackson-module-jaxb-annotations" rev="${/com.fasterxml.jackson.module/jackson-module-jaxb-annotations}" conf="test"/>
     <dependency org="com.fasterxml.jackson.module" name="jackson-module-parameter-names" rev="${/com.fasterxml.jackson.module/jackson-module-parameter-names}" conf="test"/>
     <dependency org="com.fasterxml.woodstox" name="woodstox-core" rev="${/com.fasterxml.woodstox/woodstox-core}" conf="test"/>
-    <dependency org="com.typesafe.netty" name="netty-reactive-streams-http" rev="${/com.typesafe.netty/netty-reactive-streams-http}" conf="test"/>
-    <dependency org="com.typesafe.netty" name="netty-reactive-streams" rev="${/com.typesafe.netty/netty-reactive-streams}" conf="test"/>
     <dependency org="io.micrometer" name="micrometer-core" rev="${/io.micrometer/micrometer-core}" conf="test"/>
     <dependency org="jakarta.annotation" name="jakarta.annotation-api" rev="${/jakarta.annotation/jakarta.annotation-api}" conf="test"/>
     <dependency org="jakarta.servlet" name="jakarta.servlet-api" rev="${/jakarta.servlet/jakarta.servlet-api}" conf="test"/>
@@ -86,5 +84,6 @@
     <dependency org="org.springframework" name="spring-jcl" rev="${/org.springframework/spring-jcl}" conf="test"/>
     <dependency org="org.springframework" name="spring-web" rev="${/org.springframework/spring-web}" conf="test"/>
     <dependency org="org.springframework" name="spring-webmvc" rev="${/org.springframework/spring-webmvc}" conf="test"/>
+    <dependency org="software.amazon.awssdk" name="url-connection-client" rev="${/software.amazon.awssdk/url-connection-client}" conf="test"/>
   </dependencies>
 </ivy-module>
diff --git a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepository.java b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepository.java
index d92f285..7e3ed7b 100644
--- a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepository.java
+++ b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepository.java
@@ -88,9 +88,9 @@ public class S3BackupRepository implements BackupRepository {
       if (location.startsWith(S3_SCHEME + ":")) {
         result = new URI(location);
       } else if (location.startsWith("/")) {
-        result = new URI(S3_SCHEME, null, location, null);
+        result = new URI(S3_SCHEME, "", location, null);
       } else {
-        result = new URI(S3_SCHEME, null, "/" + location, null);
+        result = new URI(S3_SCHEME, "", "/" + location, null);
       }
       return result;
     } catch (URISyntaxException ex) {
@@ -125,9 +125,11 @@ public class S3BackupRepository implements BackupRepository {
   @Override
   public URI resolveDirectory(URI baseUri, String... pathComponents) {
     if (pathComponents.length > 0) {
-      pathComponents[pathComponents.length - 1] = pathComponents[pathComponents.length - 1] + "/";
+      if (!pathComponents[pathComponents.length - 1].endsWith("/")) {
+        pathComponents[pathComponents.length - 1] = pathComponents[pathComponents.length - 1] + "/";
+      }
     } else {
-      if (!baseUri.getPath().endsWith("/")) {
+      if (!baseUri.toString().endsWith("/")) {
         baseUri = URI.create(baseUri + "/");
       }
     }
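
The change from a null to an empty authority above affects how java.net.URI renders these backup locations (see the updated test expectations further down). A minimal standalone illustration, not part of this patch:

```java
import java.net.URI;
import java.net.URISyntaxException;

public class UriAuthorityDemo {
  public static void main(String[] args) throws URISyntaxException {
    // URI(String scheme, String host, String path, String fragment)
    System.out.println(new URI("s3", null, "/x", null)); // s3:/x   -- no authority
    System.out.println(new URI("s3", "", "/x", null));   // s3:///x -- empty authority
  }
}
```
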
diff --git a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepositoryConfig.java b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepositoryConfig.java
index 04b7afe..229b224 100644
--- a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepositoryConfig.java
+++ b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3BackupRepositoryConfig.java
@@ -29,26 +29,26 @@ public class S3BackupRepositoryConfig {
   public static final String BUCKET_NAME = "s3.bucket.name";
   public static final String REGION = "s3.region";
   public static final String ENDPOINT = "s3.endpoint";
-  public static final String PROXY_HOST = "s3.proxy.host";
-  public static final String PROXY_PORT = "s3.proxy.port";
+  public static final String PROXY_URL = "s3.proxy.url";
+  public static final String PROXY_USE_SYSTEM_SETTINGS = "s3.proxy.useSystemSettings";
 
   private final String bucketName;
   private final String region;
-  private final String proxyHost;
-  private final int proxyPort;
+  private final String proxyURL;
+  private final boolean proxyUseSystemSettings;
   private final String endpoint;
 
   public S3BackupRepositoryConfig(NamedList<?> config) {
     region = getStringConfig(config, REGION);
     bucketName = getStringConfig(config, BUCKET_NAME);
-    proxyHost = getStringConfig(config, PROXY_HOST);
-    proxyPort = getIntConfig(config, PROXY_PORT);
+    proxyURL = getStringConfig(config, PROXY_URL);
+    proxyUseSystemSettings = getBooleanConfig(config, PROXY_USE_SYSTEM_SETTINGS, true);
     endpoint = getStringConfig(config, ENDPOINT);
   }
 
   /** Construct a {@link S3StorageClient} from the provided config. */
   public S3StorageClient buildClient() {
-    return new S3StorageClient(bucketName, region, proxyHost, proxyPort, endpoint);
+    return new S3StorageClient(bucketName, region, proxyURL, proxyUseSystemSettings, endpoint);
   }
 
   private static String getStringConfig(NamedList<?> config, String property) {
@@ -73,10 +73,14 @@ public class S3BackupRepositoryConfig {
 
   /** If the property has any value other than 'true' or 'TRUE', this will default to false. */
   private static boolean getBooleanConfig(NamedList<?> config, String property) {
+    return getBooleanConfig(config, property, false);
+  }
+
+  private static boolean getBooleanConfig(NamedList<?> config, String property, boolean def) {
     String envProp = System.getenv().get(toEnvVar(property));
     if (envProp == null) {
       Boolean configProp = config.getBooleanArg(property);
-      return configProp != null && configProp;
+      return configProp == null ? def : configProp;
     } else {
       return Boolean.parseBoolean(envProp);
     }
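
A minimal standalone sketch of wiring the renamed proxy settings through this config class (bucket, region, and proxy URL are placeholders; not part of this patch):

```java
import org.apache.solr.common.util.NamedList;
import org.apache.solr.s3.S3BackupRepositoryConfig;
import org.apache.solr.s3.S3StorageClient;

public class ProxyConfigSketch {
  public static void main(String[] args) {
    NamedList<Object> config = new NamedList<>();
    config.add(S3BackupRepositoryConfig.BUCKET_NAME, "my-backups");
    config.add(S3BackupRepositoryConfig.REGION, "us-west-2");
    config.add(S3BackupRepositoryConfig.PROXY_URL, "http://proxy.example.com:8080");
    // s3.proxy.useSystemSettings defaults to true when unset; it only takes effect
    // when no explicit proxy URL is given (see createInternalClient further down).
    config.add(S3BackupRepositoryConfig.PROXY_USE_SYSTEM_SETTINGS, false);

    S3StorageClient client = new S3BackupRepositoryConfig(config).buildClient();
  }
}
```
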
diff --git a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3OutputStream.java b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3OutputStream.java
index ded053f..9a46f45 100644
--- a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3OutputStream.java
+++ b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3OutputStream.java
@@ -16,16 +16,6 @@
  */
 package org.apache.solr.s3;
 
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.event.ProgressEvent;
-import com.amazonaws.event.SyncProgressListener;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
-import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
-import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.amazonaws.services.s3.model.PartETag;
-import com.amazonaws.services.s3.model.UploadPartRequest;
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -35,6 +25,12 @@ import java.util.ArrayList;
 import java.util.List;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.core.exception.SdkException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.model.CompletedPart;
+import software.amazon.awssdk.services.s3.model.UploadPartRequest;
+import software.amazon.awssdk.services.s3.model.UploadPartResponse;
 
 /**
  * Implementation is adapted from
@@ -52,21 +48,19 @@ public class S3OutputStream extends OutputStream {
   static final int PART_SIZE = 16777216;
   static final int MIN_PART_SIZE = 5242880;
 
-  private final AmazonS3 s3Client;
+  private final S3Client s3Client;
   private final String bucketName;
   private final String key;
-  private final SyncProgressListener progressListener;
   private volatile boolean closed;
   private final ByteBuffer buffer;
   private MultipartUpload multiPartUpload;
 
-  public S3OutputStream(AmazonS3 s3Client, String key, String bucketName) {
+  public S3OutputStream(S3Client s3Client, String key, String bucketName) {
     this.s3Client = s3Client;
     this.bucketName = bucketName;
     this.key = key;
     this.closed = false;
     this.buffer = ByteBuffer.allocate(PART_SIZE);
-    this.progressListener = new ConnectProgressListener();
     this.multiPartUpload = null;
 
     if (log.isDebugEnabled()) {
@@ -84,7 +78,7 @@ public class S3OutputStream extends OutputStream {
 
     // If the buffer is now full, push it to remote S3.
     if (!buffer.hasRemaining()) {
-      uploadPart(false);
+      uploadPart();
     }
   }
 
@@ -105,7 +99,7 @@ public class S3OutputStream extends OutputStream {
     while (buffer.remaining() < lenRemaining) {
       int firstPart = buffer.remaining();
       buffer.put(b, currentOffset, firstPart);
-      uploadPart(false);
+      uploadPart();
 
       currentOffset += firstPart;
       lenRemaining -= firstPart;
@@ -119,9 +113,8 @@ public class S3OutputStream extends OutputStream {
     return off < 0 || off > len;
   }
 
-  private void uploadPart(boolean isLastPart) throws IOException {
-
-    int size = buffer.position();
+  private void uploadPart() throws IOException {
+    int size = buffer.position() - buffer.arrayOffset();
 
     if (size == 0) {
       // nothing to upload
@@ -134,8 +127,9 @@ public class S3OutputStream extends OutputStream {
       }
       multiPartUpload = newMultipartUpload();
     }
-    try {
-      multiPartUpload.uploadPart(new ByteArrayInputStream(buffer.array()), size, isLastPart);
+    try (ByteArrayInputStream inputStream =
+        new ByteArrayInputStream(buffer.array(), buffer.arrayOffset(), size)) {
+      multiPartUpload.uploadPart(inputStream, size);
     } catch (Exception e) {
       if (multiPartUpload != null) {
         multiPartUpload.abort();
@@ -158,8 +152,8 @@ public class S3OutputStream extends OutputStream {
 
     // Flush is possible only if we have more data than the required part size
     // If buffer size is lower than that, just skip
-    if (buffer.position() >= MIN_PART_SIZE) {
-      uploadPart(false);
+    if (buffer.position() - buffer.arrayOffset() >= MIN_PART_SIZE) {
+      uploadPart();
     }
   }
 
@@ -170,7 +164,7 @@ public class S3OutputStream extends OutputStream {
     }
 
     // flush first
-    uploadPart(true);
+    uploadPart();
 
     if (multiPartUpload != null) {
       multiPartUpload.complete();
@@ -181,32 +175,21 @@ public class S3OutputStream extends OutputStream {
   }
 
   private MultipartUpload newMultipartUpload() throws IOException {
-    InitiateMultipartUploadRequest initRequest =
-        new InitiateMultipartUploadRequest(bucketName, key, new ObjectMetadata());
-
     try {
-      return new MultipartUpload(s3Client.initiateMultipartUpload(initRequest).getUploadId());
-    } catch (AmazonClientException e) {
+      return new MultipartUpload(
+          s3Client.createMultipartUpload(b -> b.bucket(bucketName).key(key)).uploadId());
+    } catch (SdkException e) {
       throw S3StorageClient.handleAmazonException(e);
     }
   }
 
-  // Placeholder listener for now, just logs the event progress.
-  private static class ConnectProgressListener extends SyncProgressListener {
-    public void progressChanged(ProgressEvent progressEvent) {
-      if (log.isDebugEnabled()) {
-        log.debug("Progress event {}", progressEvent);
-      }
-    }
-  }
-
   private class MultipartUpload {
     private final String uploadId;
-    private final List<PartETag> partETags;
+    private final List<CompletedPart> completedParts;
 
     public MultipartUpload(String uploadId) {
       this.uploadId = uploadId;
-      this.partETags = new ArrayList<>();
+      this.completedParts = new ArrayList<>();
       if (log.isDebugEnabled()) {
         log.debug(
             "Initiated multi-part upload for bucketName '{}' key '{}' with id '{}'",
@@ -216,24 +199,24 @@ public class S3OutputStream extends OutputStream {
       }
     }
 
-    void uploadPart(ByteArrayInputStream inputStream, int partSize, boolean isLastPart) {
-      int currentPartNumber = partETags.size() + 1;
+    void uploadPart(ByteArrayInputStream inputStream, long partSize) {
+      int currentPartNumber = completedParts.size() + 1;
 
       UploadPartRequest request =
-          new UploadPartRequest()
-              .withKey(key)
-              .withBucketName(bucketName)
-              .withUploadId(uploadId)
-              .withInputStream(inputStream)
-              .withPartNumber(currentPartNumber)
-              .withPartSize(partSize)
-              .withLastPart(isLastPart)
-              .withGeneralProgressListener(progressListener);
+          UploadPartRequest.builder()
+              .key(key)
+              .bucket(bucketName)
+              .uploadId(uploadId)
+              .partNumber(currentPartNumber)
+              .build();
 
       if (log.isDebugEnabled()) {
         log.debug("Uploading part {} for id '{}'", currentPartNumber, uploadId);
       }
-      partETags.add(s3Client.uploadPart(request).getPartETag());
+      UploadPartResponse response =
+          s3Client.uploadPart(request, RequestBody.fromInputStream(inputStream, partSize));
+      completedParts.add(
+          CompletedPart.builder().partNumber(currentPartNumber).eTag(response.eTag()).build());
     }
 
     /** To be invoked when closing the stream to mark upload is done. */
@@ -241,9 +224,12 @@ public class S3OutputStream extends OutputStream {
       if (log.isDebugEnabled()) {
         log.debug("Completing multi-part upload for key '{}', id '{}'", key, uploadId);
       }
-      CompleteMultipartUploadRequest completeRequest =
-          new CompleteMultipartUploadRequest(bucketName, key, uploadId, partETags);
-      s3Client.completeMultipartUpload(completeRequest);
+      s3Client.completeMultipartUpload(
+          b ->
+              b.bucket(bucketName)
+                  .key(key)
+                  .uploadId(uploadId)
+                  .multipartUpload(mub -> mub.parts(completedParts)));
     }
 
     public void abort() {
@@ -251,12 +237,10 @@ public class S3OutputStream extends OutputStream {
         log.warn("Aborting multi-part upload with id '{}'", uploadId);
       }
       try {
-        s3Client.abortMultipartUpload(new AbortMultipartUploadRequest(bucketName, key, uploadId));
+        s3Client.abortMultipartUpload(b -> b.bucket(bucketName).key(key).uploadId(uploadId));
       } catch (Exception e) {
         // ignoring failure on abort.
-        if (log.isWarnEnabled()) {
-          log.warn("Unable to abort multipart upload, you may need to purge uploaded parts: ", e);
-        }
+        log.error("Unable to abort multipart upload, you may need to purge uploaded parts: ", e);
       }
     }
   }
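
For context, a minimal standalone sketch of the SDK v2 multipart-upload flow that S3OutputStream now follows (bucket, key, and payload are placeholders; not part of this patch):

```java
import java.util.ArrayList;
import java.util.List;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CompletedPart;
import software.amazon.awssdk.services.s3.model.UploadPartRequest;

public class MultipartUploadSketch {
  public static void main(String[] args) {
    String bucket = "my-bucket";
    String key = "backups/segment_1";
    try (S3Client s3 = S3Client.create()) { // default region/credentials providers
      String uploadId =
          s3.createMultipartUpload(b -> b.bucket(bucket).key(key)).uploadId();
      List<CompletedPart> parts = new ArrayList<>();
      byte[] payload = new byte[5 * 1024 * 1024]; // every part but the last must be >= 5 MiB
      for (int partNumber = 1; partNumber <= 2; partNumber++) {
        UploadPartRequest request = UploadPartRequest.builder()
            .bucket(bucket).key(key).uploadId(uploadId).partNumber(partNumber).build();
        String eTag = s3.uploadPart(request, RequestBody.fromBytes(payload)).eTag();
        parts.add(CompletedPart.builder().partNumber(partNumber).eTag(eTag).build());
      }
      s3.completeMultipartUpload(
          b -> b.bucket(bucket).key(key).uploadId(uploadId)
              .multipartUpload(m -> m.parts(parts)));
      // On failure, abort instead:
      // s3.abortMultipartUpload(b -> b.bucket(bucket).key(key).uploadId(uploadId));
    }
  }
}
```
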
diff --git a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3StorageClient.java b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3StorageClient.java
index 68c3705..04a385e 100644
--- a/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3StorageClient.java
+++ b/solr/contrib/s3-repository/src/java/org/apache/solr/s3/S3StorageClient.java
@@ -16,29 +16,13 @@
  */
 package org.apache.solr.s3;
 
-import com.amazonaws.AmazonClientException;
-import com.amazonaws.AmazonServiceException;
-import com.amazonaws.ClientConfiguration;
-import com.amazonaws.Protocol;
-import com.amazonaws.client.builder.AwsClientBuilder;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3ClientBuilder;
-import com.amazonaws.services.s3.model.DeleteObjectsRequest;
-import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
-import com.amazonaws.services.s3.model.DeleteObjectsResult;
-import com.amazonaws.services.s3.model.ListObjectsRequest;
-import com.amazonaws.services.s3.model.ObjectListing;
-import com.amazonaws.services.s3.model.ObjectMetadata;
-import com.amazonaws.services.s3.model.PutObjectRequest;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.model.S3ObjectSummary;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import java.io.Closeable;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
+import java.net.URI;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashSet;
@@ -46,14 +30,35 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Set;
 import java.util.stream.Collectors;
-import org.apache.commons.io.input.ClosedInputStream;
 import org.apache.curator.shaded.com.google.common.collect.Sets;
+import java.util.stream.Stream;
 import org.apache.solr.common.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.awscore.exception.AwsServiceException;
+import software.amazon.awssdk.core.exception.SdkClientException;
+import software.amazon.awssdk.core.exception.SdkException;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.http.apache.ApacheHttpClient;
+import software.amazon.awssdk.http.apache.ProxyConfiguration;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
+import software.amazon.awssdk.services.s3.S3ClientBuilder;
+import software.amazon.awssdk.services.s3.model.CommonPrefix;
+import software.amazon.awssdk.services.s3.model.Delete;
+import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest;
+import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse;
+import software.amazon.awssdk.services.s3.model.DeletedObject;
+import software.amazon.awssdk.services.s3.model.HeadObjectResponse;
+import software.amazon.awssdk.services.s3.model.NoSuchBucketException;
+import software.amazon.awssdk.services.s3.model.NoSuchKeyException;
+import software.amazon.awssdk.services.s3.model.ObjectIdentifier;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+import software.amazon.awssdk.services.s3.model.S3Object;
+import software.amazon.awssdk.services.s3.paginators.ListObjectsV2Iterable;
 
 /**
- * Creates a {@link AmazonS3} for communicating with AWS S3. Utilizes the default credential
+ * Creates a {@link S3Client} for communicating with AWS S3. Utilizes the default credential
  * provider chain; reference <a
  * href="https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html">AWS SDK
  * docs</a> for details on where this client will fetch credentials from, and the order of
@@ -74,48 +79,51 @@ public class S3StorageClient {
   // Error messages returned by S3 for a key not found.
   private static final Set<String> NOT_FOUND_CODES = Sets.newHashSet("NoSuchKey", "404 Not Found");
 
-  private final AmazonS3 s3Client;
+  private final S3Client s3Client;
 
   // The S3 bucket where we read/write all data.
   private final String bucketName;
 
   S3StorageClient(
-      String bucketName, String region, String proxyHost, int proxyPort, String endpoint) {
-    this(createInternalClient(region, proxyHost, proxyPort, endpoint), bucketName);
+      String bucketName,
+      String region,
+      String proxyUrl,
+      boolean proxyUseSystemSettings,
+      String endpoint) {
+    this(createInternalClient(region, proxyUrl, proxyUseSystemSettings, endpoint), bucketName);
   }
 
   @VisibleForTesting
-  S3StorageClient(AmazonS3 s3Client, String bucketName) {
+  S3StorageClient(S3Client s3Client, String bucketName) {
     this.s3Client = s3Client;
     this.bucketName = bucketName;
   }
 
-  private static AmazonS3 createInternalClient(
-      String region, String proxyHost, int proxyPort, String endpoint) {
-    ClientConfiguration clientConfig = new ClientConfiguration().withProtocol(Protocol.HTTPS);
-
+  private static S3Client createInternalClient(
+      String region, String proxyUrl, boolean proxyUseSystemSettings, String endpoint) {
+    ApacheHttpClient.Builder sdkHttpClientBuilder = ApacheHttpClient.builder();
     // If configured, add proxy
-    if (!StringUtils.isEmpty(proxyHost)) {
-      clientConfig.setProxyHost(proxyHost);
-      if (proxyPort > 0) {
-        clientConfig.setProxyPort(proxyPort);
-      }
+    ProxyConfiguration.Builder proxyConfigurationBuilder = ProxyConfiguration.builder();
+    if (!StringUtils.isEmpty(proxyUrl)) {
+      proxyConfigurationBuilder.endpoint(URI.create(proxyUrl));
+    } else {
+      proxyConfigurationBuilder.useSystemPropertyValues(proxyUseSystemSettings);
     }
+    sdkHttpClientBuilder.proxyConfiguration(proxyConfigurationBuilder.build());
+    sdkHttpClientBuilder.useIdleConnectionReaper(false);
 
     /*
      * Default s3 client builder loads credentials from disk and handles token refreshes
      */
-    AmazonS3ClientBuilder clientBuilder =
-        AmazonS3ClientBuilder.standard()
-            .enablePathStyleAccess()
-            .withClientConfiguration(clientConfig);
+    S3ClientBuilder clientBuilder =
+        S3Client.builder()
+            .serviceConfiguration(builder -> builder.pathStyleAccessEnabled(true))
+            .httpClient(sdkHttpClientBuilder.build());
 
     if (!StringUtils.isEmpty(endpoint)) {
-      clientBuilder.setEndpointConfiguration(
-          new AwsClientBuilder.EndpointConfiguration(endpoint, region));
-    } else {
-      clientBuilder.setRegion(region);
+      clientBuilder.endpointOverride(URI.create(endpoint));
     }
+    clientBuilder.region(Region.of(region));
 
     return clientBuilder.build();
   }
@@ -130,17 +138,16 @@ public class S3StorageClient {
       //            throw new S3Exception("Parent directory doesn't exist, path=" + path);
     }
 
-    ObjectMetadata objectMetadata = new ObjectMetadata();
-    objectMetadata.setContentType(S3_DIR_CONTENT_TYPE);
-    objectMetadata.setContentLength(0);
-
-    // Create empty object with header
-    final InputStream im = ClosedInputStream.CLOSED_INPUT_STREAM;
-
     try {
-      PutObjectRequest putRequest = new PutObjectRequest(bucketName, path, im, objectMetadata);
-      s3Client.putObject(putRequest);
-    } catch (AmazonClientException ase) {
+      // Create empty object with content type header
+      PutObjectRequest putRequest =
+          PutObjectRequest.builder()
+              .bucket(bucketName)
+              .contentType(S3_DIR_CONTENT_TYPE)
+              .key(path)
+              .build();
+      s3Client.putObject(putRequest, RequestBody.empty());
+    } catch (SdkClientException ase) {
       throw handleAmazonException(ase);
     }
   }
@@ -175,14 +182,12 @@ public class S3StorageClient {
   void deleteDirectory(String path) throws S3Exception {
     path = sanitizedDirPath(path);
 
-    Set<String> entries = new HashSet<>();
+    // Get all the files and subdirectories
+    Set<String> entries = listAll(path);
     if (pathExists(path)) {
       entries.add(path);
     }
 
-    // Get all the files and subdirectories
-    entries.addAll(listAll(path));
-
     deleteObjects(entries);
   }
 
@@ -195,55 +200,39 @@ public class S3StorageClient {
   String[] listDir(String path) throws S3Exception {
     path = sanitizedDirPath(path);
 
-    String prefix = path;
-    ListObjectsRequest listRequest =
-        new ListObjectsRequest()
-            .withBucketName(bucketName)
-            .withPrefix(prefix)
-            .withDelimiter(S3_FILE_PATH_DELIMITER);
+    final String prefix = path;
 
-    List<String> entries = new ArrayList<>();
     try {
-      ObjectListing objectListing = s3Client.listObjects(listRequest);
-
-      while (true) {
-        List<String> files =
-            objectListing.getObjectSummaries().stream()
-                .map(S3ObjectSummary::getKey)
-                .collect(Collectors.toList());
-        files.addAll(objectListing.getCommonPrefixes());
-        // This filtering is needed only for S3mock. Real S3 does not ignore the trailing '/' in the
-        // prefix.
-        files =
-            files.stream()
-                .filter(s -> s.startsWith(prefix))
-                .map(s -> s.substring(prefix.length()))
-                .filter(s -> !s.isEmpty())
-                .filter(
-                    s -> {
-                      int slashIndex = s.indexOf(S3_FILE_PATH_DELIMITER);
-                      return slashIndex == -1 || slashIndex == s.length() - 1;
-                    })
-                .map(
-                    s -> {
-                      if (s.endsWith(S3_FILE_PATH_DELIMITER)) {
-                        return s.substring(0, s.length() - 1);
-                      }
-                      return s;
-                    })
-                .collect(Collectors.toList());
-
-        entries.addAll(files);
-
-        if (objectListing.isTruncated()) {
-          objectListing = s3Client.listNextBatchOfObjects(objectListing);
-        } else {
-          break;
-        }
-      }
-      return entries.toArray(new String[0]);
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+      ListObjectsV2Iterable objectListing =
+          s3Client.listObjectsV2Paginator(
+              builder ->
+                  builder
+                      .bucket(bucketName)
+                      .prefix(prefix)
+                      .delimiter(S3_FILE_PATH_DELIMITER)
+                      .build());
+
+      return Stream.concat(
+              objectListing.contents().stream().map(S3Object::key),
+              objectListing.commonPrefixes().stream().map(CommonPrefix::prefix))
+          .filter(s -> s.startsWith(prefix))
+          .map(s -> s.substring(prefix.length()))
+          .filter(s -> !s.isEmpty())
+          .filter(
+              s -> {
+                int slashIndex = s.indexOf(S3_FILE_PATH_DELIMITER);
+                return slashIndex == -1 || slashIndex == s.length() - 1;
+              })
+          .map(
+              s -> {
+                if (s.endsWith(S3_FILE_PATH_DELIMITER)) {
+                  return s.substring(0, s.length() - 1);
+                }
+                return s;
+              })
+          .toArray(String[]::new);
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -254,17 +243,20 @@ public class S3StorageClient {
    * @return true if path exists, otherwise false?
    */
   boolean pathExists(String path) throws S3Exception {
-    path = sanitizedPath(path);
+    final String s3Path = sanitizedPath(path);
 
     // for root return true
-    if (path.isEmpty() || S3_FILE_PATH_DELIMITER.equals(path)) {
+    if (s3Path.isEmpty() || S3_FILE_PATH_DELIMITER.equals(s3Path)) {
       return true;
     }
 
     try {
-      return s3Client.doesObjectExist(bucketName, path);
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+      s3Client.headObject(builder -> builder.bucket(bucketName).key(s3Path));
+      return true;
+    } catch (NoSuchKeyException e) {
+      return false;
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -275,15 +267,16 @@ public class S3StorageClient {
    * @return true if path is directory, otherwise false.
    */
   boolean isDirectory(String path) throws S3Exception {
-    path = sanitizedDirPath(path);
+    final String s3Path = sanitizedDirPath(path);
 
     try {
-      ObjectMetadata objectMetadata = s3Client.getObjectMetadata(bucketName, path);
-      String contentType = objectMetadata.getContentType();
+      HeadObjectResponse objectMetadata =
+          s3Client.headObject(builder -> builder.bucket(bucketName).key(s3Path));
+      String contentType = objectMetadata.contentType();
 
       return !StringUtils.isEmpty(contentType) && contentType.equalsIgnoreCase(S3_DIR_CONTENT_TYPE);
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -294,17 +287,18 @@ public class S3StorageClient {
    * @return length of file.
    */
   long length(String path) throws S3Exception {
-    path = sanitizedFilePath(path);
+    String s3Path = sanitizedFilePath(path);
     try {
-      ObjectMetadata objectMetadata = s3Client.getObjectMetadata(bucketName, path);
-      String contentType = objectMetadata.getContentType();
+      HeadObjectResponse objectMetadata =
+          s3Client.headObject(b -> b.bucket(bucketName).key(s3Path));
+      String contentType = objectMetadata.contentType();
 
       if (StringUtils.isEmpty(contentType) || !contentType.equalsIgnoreCase(S3_DIR_CONTENT_TYPE)) {
-        return objectMetadata.getContentLength();
+        return objectMetadata.contentLength();
       }
       throw new S3Exception("Path is Directory");
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -315,14 +309,13 @@ public class S3StorageClient {
    * @return InputStream for file.
    */
   InputStream pullStream(String path) throws S3Exception {
-    path = sanitizedFilePath(path);
+    final String s3Path = sanitizedFilePath(path);
 
     try {
-      S3Object requestedObject = s3Client.getObject(bucketName, path);
       // This InputStream instance needs to be closed by the caller
-      return requestedObject.getObjectContent();
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+      return s3Client.getObject(b -> b.bucket(bucketName).key(s3Path));
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -341,14 +334,14 @@ public class S3StorageClient {
 
     try {
       return new S3OutputStream(s3Client, path, bucketName);
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
   /** Override {@link Closeable} since we throw no exception. */
   void close() {
-    s3Client.shutdown();
+    s3Client.close();
   }
 
   /** Any file path that specifies a non-existent file will not be treated as an error. */
@@ -361,8 +354,8 @@ public class S3StorageClient {
        * However, there's no guarantee the delete did not happen if an exception is thrown.
        */
       return deleteObjects(paths, MAX_KEYS_PER_BATCH_DELETE);
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -374,41 +367,43 @@ public class S3StorageClient {
    */
   @VisibleForTesting
   Collection<String> deleteObjects(Collection<String> entries, int batchSize) throws S3Exception {
-    List<KeyVersion> keysToDelete =
-        entries.stream().map(KeyVersion::new).collect(Collectors.toList());
+    List<ObjectIdentifier> keysToDelete =
+        entries.stream()
+            .map(s -> ObjectIdentifier.builder().key(s).build())
+            .sorted(Comparator.comparing(ObjectIdentifier::key).reversed())
+            .collect(Collectors.toList());
 
-    keysToDelete.sort(Comparator.comparing(KeyVersion::getKey).reversed());
-    List<List<KeyVersion>> partitions = Lists.partition(keysToDelete, batchSize);
+    List<List<ObjectIdentifier>> partitions = Lists.partition(keysToDelete, batchSize);
     Set<String> deletedPaths = new HashSet<>();
 
     boolean deleteIndividually = false;
-    for (List<KeyVersion> partition : partitions) {
+    for (List<ObjectIdentifier> partition : partitions) {
       DeleteObjectsRequest request = createBatchDeleteRequest(partition);
 
       try {
-        DeleteObjectsResult result = s3Client.deleteObjects(request);
-
-        result.getDeletedObjects().stream()
-            .map(DeleteObjectsResult.DeletedObject::getKey)
-            .forEach(deletedPaths::add);
-      } catch (AmazonServiceException e) {
-        // This means that the batch-delete is not implemented by this S3 server
-        if (e.getStatusCode() == 501) {
+        DeleteObjectsResponse response = s3Client.deleteObjects(request);
+
+        response.deleted().stream().map(DeletedObject::key).forEach(deletedPaths::add);
+      } catch (AwsServiceException ase) {
+        if (ase.statusCode() == 501) {
+          // This means that the batch-delete is not implemented by this S3 server
           deleteIndividually = true;
           break;
         } else {
-          throw e;
+          throw handleAmazonException(ase);
         }
+      } catch (SdkException sdke) {
+        throw handleAmazonException(sdke);
       }
     }
 
     if (deleteIndividually) {
-      for (KeyVersion k : keysToDelete) {
+      for (ObjectIdentifier k : keysToDelete) {
         try {
-          s3Client.deleteObject(bucketName, k.getKey());
-          deletedPaths.add(k.getKey());
-        } catch (AmazonClientException e) {
-          throw new S3Exception("Could not delete object with key: " + k.getKey(), e);
+          s3Client.deleteObject(b -> b.bucket(bucketName).key(k.key()));
+          deletedPaths.add(k.key());
+        } catch (SdkException sdke) {
+          throw new S3Exception("Could not delete object with key: " + k.key(), sdke);
         }
       }
     }
@@ -416,39 +411,29 @@ public class S3StorageClient {
     return deletedPaths;
   }
 
-  private DeleteObjectsRequest createBatchDeleteRequest(List<KeyVersion> keysToDelete) {
-    return new DeleteObjectsRequest(bucketName).withKeys(keysToDelete);
+  private DeleteObjectsRequest createBatchDeleteRequest(List<ObjectIdentifier> keysToDelete) {
+    return DeleteObjectsRequest.builder()
+        .bucket(bucketName)
+        .delete(Delete.builder().objects(keysToDelete).build())
+        .build();
   }
 
-  private List<String> listAll(String path) throws S3Exception {
+  private Set<String> listAll(String path) throws S3Exception {
     String prefix = sanitizedDirPath(path);
-    ListObjectsRequest listRequest =
-        new ListObjectsRequest().withBucketName(bucketName).withPrefix(prefix);
 
-    List<String> entries = new ArrayList<>();
     try {
-      ObjectListing objectListing = s3Client.listObjects(listRequest);
-
-      while (true) {
-        List<String> files =
-            objectListing.getObjectSummaries().stream()
-                .map(S3ObjectSummary::getKey)
-                // This filtering is needed only for S3mock. Real S3 does not ignore the trailing
-                // '/' in the prefix.
-                .filter(s -> s.startsWith(prefix))
-                .collect(Collectors.toList());
-
-        entries.addAll(files);
-
-        if (objectListing.isTruncated()) {
-          objectListing = s3Client.listNextBatchOfObjects(objectListing);
-        } else {
-          break;
-        }
-      }
-      return entries;
-    } catch (AmazonClientException ase) {
-      throw handleAmazonException(ase);
+      ListObjectsV2Iterable objectListing =
+          s3Client.listObjectsV2Paginator(
+              builder -> builder.bucket(bucketName).prefix(prefix).build());
+
+      return objectListing.contents().stream()
+          .map(S3Object::key)
+          // This filtering is needed only for S3mock. Real S3 does not ignore the trailing
+          // '/' in the prefix.
+          .filter(s -> s.startsWith(prefix))
+          .collect(Collectors.toSet());
+    } catch (SdkException sdke) {
+      throw handleAmazonException(sdke);
     }
   }
 
@@ -536,31 +521,33 @@ public class S3StorageClient {
    * Best effort to handle Amazon exceptions as checked exceptions. Amazon exception are all
    * subclasses of {@link RuntimeException} so some may still be uncaught and propagated.
    */
-  static S3Exception handleAmazonException(AmazonClientException ace) {
+  static S3Exception handleAmazonException(SdkException sdke) {
 
-    if (ace instanceof AmazonServiceException) {
-      AmazonServiceException ase = (AmazonServiceException) ace;
+    if (sdke instanceof AwsServiceException) {
+      AwsServiceException ase = (AwsServiceException) sdke;
       String errMessage =
           String.format(
               Locale.ROOT,
               "An AmazonServiceException was thrown! [serviceName=%s] "
-                  + "[awsRequestId=%s] [httpStatus=%s] [s3ErrorCode=%s] [s3ErrorType=%s] [message=%s]",
-              ase.getServiceName(),
-              ase.getRequestId(),
-              ase.getStatusCode(),
-              ase.getErrorCode(),
-              ase.getErrorType(),
-              ase.getErrorMessage());
+                  + "[awsRequestId=%s] [httpStatus=%s] [s3ErrorCode=%s] [message=%s]",
+              ase.awsErrorDetails().serviceName(),
+              ase.requestId(),
+              ase.statusCode(),
+              ase.awsErrorDetails().errorCode(),
+              ase.awsErrorDetails().errorMessage());
 
       log.error(errMessage);
 
-      if (ase.getStatusCode() == 404 && NOT_FOUND_CODES.contains(ase.getErrorCode())) {
+      if (sdke instanceof NoSuchKeyException
+          || sdke instanceof NoSuchBucketException
+          || (ase.statusCode() == 404
+              && NOT_FOUND_CODES.contains(ase.awsErrorDetails().errorCode()))) {
         return new S3NotFoundException(errMessage, ase);
       } else {
         return new S3Exception(errMessage, ase);
       }
     }
 
-    return new S3Exception(ace);
+    return new S3Exception(sdke);
   }
 }
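
A minimal standalone sketch of the client wiring and pagination pattern S3StorageClient now uses (synchronous ApacheHttpClient instead of the removed Netty NIO client); region, bucket, prefix, and proxy values are placeholders, not part of this patch:

```java
import java.net.URI;
import software.amazon.awssdk.http.apache.ApacheHttpClient;
import software.amazon.awssdk.http.apache.ProxyConfiguration;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.S3Object;

public class S3ClientWiringSketch {
  public static void main(String[] args) {
    S3Client s3 = S3Client.builder()
        .region(Region.US_WEST_2)
        .serviceConfiguration(c -> c.pathStyleAccessEnabled(true))
        .httpClient(ApacheHttpClient.builder()
            .proxyConfiguration(ProxyConfiguration.builder()
                .endpoint(URI.create("http://proxy.example.com:8080"))
                .build())
            .useIdleConnectionReaper(false)
            .build())
        .build();

    // The ListObjectsV2 paginator follows continuation tokens itself,
    // replacing the manual isTruncated()/listNextBatchOfObjects() loop.
    s3.listObjectsV2Paginator(b -> b.bucket("my-bucket").prefix("backups/"))
        .contents().stream()
        .map(S3Object::key)
        .forEach(System.out::println);

    s3.close();
  }
}
```
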
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java
index 79635b9..ec0eb03 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/AbstractS3ClientTest.java
@@ -17,7 +17,6 @@
 package org.apache.solr.s3;
 
 import com.adobe.testing.s3mock.junit4.S3MockRule;
-import com.amazonaws.services.s3.AmazonS3;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.charset.Charset;
@@ -44,8 +43,12 @@ public class AbstractS3ClientTest extends SolrTestCaseJ4 {
 
   @Before
   public void setUpClient() {
-    AmazonS3 s3 = S3_MOCK_RULE.createS3Client();
-    client = new S3StorageClient(s3, BUCKET_NAME);
+    System.setProperty("aws.accessKeyId", "foo");
+    System.setProperty("aws.secretAccessKey", "bar");
+
+    client =
+        new S3StorageClient(
+            BUCKET_NAME, "us-east-1", "", false, "http://localhost:" + S3_MOCK_RULE.getHttpPort());
   }
 
   @After
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
index c7934be..978eda5 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3BackupRepositoryTest.java
@@ -19,8 +19,6 @@ package org.apache.solr.s3;
 import static org.apache.solr.s3.S3BackupRepository.S3_SCHEME;
 
 import com.adobe.testing.s3mock.junit4.S3MockRule;
-import com.amazonaws.regions.Regions;
-import com.amazonaws.services.s3.AmazonS3;
 import com.google.common.base.Strings;
 import java.io.File;
 import java.io.IOException;
@@ -46,6 +44,9 @@ import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
+import software.amazon.awssdk.core.sync.RequestBody;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
 
 public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
 
@@ -80,7 +81,7 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
           S3_SCHEME,
           uri.getScheme());
       assertEquals("URI path should be prefixed with /", "/x", uri.getPath());
-      assertEquals("s3:/x", uri.toString());
+      assertEquals("s3:///x", uri.toString());
 
       URI directoryUri = repo.createDirectoryURI("d");
       assertEquals(
@@ -89,7 +90,7 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
           directoryUri.getScheme());
       assertEquals(
           "createDirectoryURI should add a trailing slash to URI",
-          "s3:/d/",
+          "s3:///d/",
           directoryUri.toString());
 
       repo.createDirectory(directoryUri);
@@ -97,8 +98,13 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
       directoryUri = repo.createDirectoryURI("d/");
       assertEquals(
           "createDirectoryURI should have a single trailing slash, even if one is provided",
-          "s3:/d/",
+          "s3:///d/",
           directoryUri.toString());
+
+      assertEquals(
+          "createDirectoryURI should have a single trailing slash, even if one is provided",
+          "s3:///this_is_not_a_host/",
+          repo.createURI("/this_is_not_a_host/").toString());
     }
   }
 
@@ -302,6 +308,9 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
 
   @Override
   protected S3BackupRepository getRepository() {
+    System.setProperty("aws.accessKeyId", "foo");
+    System.setProperty("aws.secretAccessKey", "bar");
+
     NamedList<Object> args = getBaseBackupRepositoryConfiguration();
 
     S3BackupRepository repo = new S3BackupRepository();
@@ -318,30 +327,24 @@ public class S3BackupRepositoryTest extends AbstractBackupRepositoryTest {
   @Override
   protected NamedList<Object> getBaseBackupRepositoryConfiguration() {
     NamedList<Object> args = new NamedList<>();
-    args.add(S3BackupRepositoryConfig.REGION, Regions.US_EAST_1.name());
+    args.add(S3BackupRepositoryConfig.REGION, Region.US_EAST_1.id());
     args.add(S3BackupRepositoryConfig.BUCKET_NAME, BUCKET_NAME);
     args.add(S3BackupRepositoryConfig.ENDPOINT, "http://localhost:" + S3_MOCK_RULE.getHttpPort());
     return args;
   }
 
   private void pushObject(String path, String content) {
-    AmazonS3 s3 = S3_MOCK_RULE.createS3Client();
-    try {
-      s3.putObject(BUCKET_NAME, path, content);
-    } finally {
-      s3.shutdown();
+    try (S3Client s3 = S3_MOCK_RULE.createS3ClientV2()) {
+      s3.putObject(b -> b.bucket(BUCKET_NAME).key(path), RequestBody.fromString(content));
     }
   }
 
   private File pullObject(String path) throws IOException {
-    AmazonS3 s3 = S3_MOCK_RULE.createS3Client();
-    try {
+    try (S3Client s3 = S3_MOCK_RULE.createS3ClientV2()) {
       File file = temporaryFolder.newFile();
-      InputStream input = s3.getObject(BUCKET_NAME, path).getObjectContent();
+      InputStream input = s3.getObject(b -> b.bucket(BUCKET_NAME).key(path));
       FileUtils.copyInputStreamToFile(input, file);
       return file;
-    } finally {
-      s3.shutdown();
     }
   }
 }
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java
index c784647..befcad3 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IncrementalBackupTest.java
@@ -18,7 +18,6 @@
 package org.apache.solr.s3;
 
 import com.adobe.testing.s3mock.junit4.S3MockRule;
-import com.amazonaws.regions.Regions;
 import java.lang.invoke.MethodHandles;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.cloud.api.collections.AbstractIncrementalBackupTest;
@@ -26,6 +25,7 @@ import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.regions.Region;
 
 @LuceneTestCase.SuppressCodecs({
   "SimpleText"
@@ -90,13 +90,14 @@ public class S3IncrementalBackupTest extends AbstractIncrementalBackupTest {
   @BeforeClass
   public static void setupClass() throws Exception {
     System.setProperty("aws.accessKeyId", "foo");
-    System.setProperty("aws.secretKey", "bar");
+    System.setProperty("aws.secretAccessKey", "bar");
+
     configureCluster(NUM_SHARDS) // nodes
         .addConfig("conf1", getFile("conf/solrconfig.xml").getParentFile().toPath())
         .withSolrXml(
             SOLR_XML
                 .replace("BUCKET", BUCKET_NAME)
-                .replace("REGION", Regions.US_EAST_1.getName())
+                .replace("REGION", Region.US_EAST_1.id())
                 .replace("ENDPOINT", "http://localhost:" + S3_MOCK_RULE.getHttpPort()))
         .configure();
   }
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IndexInputTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IndexInputTest.java
index cefae1a..ef3cbb8 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IndexInputTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3IndexInputTest.java
@@ -67,38 +67,38 @@ public class S3IndexInputTest extends SolrTestCaseJ4 {
     File file = new File(tmp, "content");
     FileUtils.write(file, content, StandardCharsets.UTF_8);
 
-    SliceInputStream slicedStream = new SliceInputStream(new FileInputStream(file), slice);
-    S3IndexInput input = new S3IndexInput(slicedStream, "path", file.length());
-
-    // Now read the file
-    ByteBuffer buffer;
-    if (directBuffer) {
-      buffer = ByteBuffer.allocateDirect((int) file.length());
-    } else {
-      buffer = ByteBuffer.allocate((int) file.length());
+    try (SliceInputStream slicedStream = new SliceInputStream(new FileInputStream(file), slice);
+         S3IndexInput input = new S3IndexInput(slicedStream, "path", file.length())) {
+
+      // Now read the file
+      ByteBuffer buffer;
+      if (directBuffer) {
+        buffer = ByteBuffer.allocateDirect((int) file.length());
+      } else {
+        buffer = ByteBuffer.allocate((int) file.length());
+      }
+      input.readInternal(buffer);
+
+      // Check the buffer content, in a way that works for both heap and direct buffers
+      buffer.position(0);
+      byte[] bytes = new byte[buffer.remaining()];
+      buffer.get(bytes);
+      assertEquals(content, new String(bytes, Charset.defaultCharset()));
+
+      // Ensure we actually made many calls
+      int expectedReadCount;
+      if (directBuffer) {
+        // For direct buffer, we may be capped by the internal buffer if it's smaller than the
+        // size defined by the test
+        expectedReadCount = content.length() / Math.min(slice, S3IndexInput.LOCAL_BUFFER_SIZE) + 1;
+      } else {
+        expectedReadCount = content.length() / slice + 1;
+      }
+      assertEquals(
+          "S3IndexInput did an unexpected number of reads",
+          expectedReadCount,
+          slicedStream.readCount);
     }
-    input.readInternal(buffer);
-    input.close();
-
-    // Check the buffer content, in a way that works for both heap and direct buffers
-    buffer.position(0);
-    byte[] bytes = new byte[buffer.remaining()];
-    buffer.get(bytes);
-    assertEquals(content, new String(bytes, Charset.defaultCharset()));
-
-    // Ensure we actually made many calls
-    int expectedReadCount;
-    if (directBuffer) {
-      // For direct buffer, we may be capped by the internal buffer if it's smaller than the size defined
-      // by the test
-      expectedReadCount = content.length() / Math.min(slice, S3IndexInput.LOCAL_BUFFER_SIZE) + 1;
-    } else {
-      expectedReadCount = content.length() / slice + 1;
-    }
-    assertEquals(
-        "S3IndexInput did an unexpected number of reads",
-        expectedReadCount,
-        slicedStream.readCount);
   }
 
   /** Input stream that reads, but not too much in a single call. */
@@ -126,5 +126,11 @@ public class S3IndexInputTest extends SolrTestCaseJ4 {
       int slicedLength = Math.min(slice, length);
       return super.read(b, off, slicedLength);
     }
+
+    @Override
+    public void close() throws IOException {
+      input.close();
+      super.close();
+    }
   }
 }
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java
index 52f9fb2..861c935 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3OutputStreamTest.java
@@ -17,7 +17,6 @@
 package org.apache.solr.s3;
 
 import com.adobe.testing.s3mock.junit4.S3MockRule;
-import com.amazonaws.services.s3.AmazonS3;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
@@ -28,6 +27,7 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
+import software.amazon.awssdk.services.s3.S3Client;
 
 public class S3OutputStreamTest extends SolrTestCaseJ4 {
 
@@ -41,16 +41,16 @@ public class S3OutputStreamTest extends SolrTestCaseJ4 {
           .withProperty("server.jetty.threads.idle-timeout", "3s")
           .build();
 
-  private AmazonS3 s3;
+  private S3Client s3;
 
   @Before
   public void setUpClient() {
-    s3 = S3_MOCK_RULE.createS3Client();
+    s3 = S3_MOCK_RULE.createS3ClientV2();
   }
 
   @After
   public void tearDownClient() {
-    s3.shutdown();
+    s3.close();
   }
 
   /**
@@ -59,58 +59,58 @@ public class S3OutputStreamTest extends SolrTestCaseJ4 {
    */
   @Test
   public void testWriteByteByByte() throws IOException {
-
-    S3OutputStream output = new S3OutputStream(s3, "byte-by-byte", BUCKET);
-    output.write('h');
-    output.write('e');
-    output.write('l');
-    output.write('l');
-    output.write('o');
-    output.close();
+    try (S3OutputStream output = new S3OutputStream(s3, "byte-by-byte", BUCKET)) {
+      output.write('h');
+      output.write('e');
+      output.write('l');
+      output.write('l');
+      output.write('o');
+    }
 
     // Check we can re-read same content
-    InputStream input = s3.getObject(BUCKET, "byte-by-byte").getObjectContent();
-    String read = IOUtils.toString(input, Charset.defaultCharset());
-    assertEquals("Contents saved to S3 file did not match expected", "hello", read);
+    try (InputStream input = s3.getObject(b -> b.bucket(BUCKET).key("byte-by-byte"))) {
+      String read = IOUtils.toString(input, Charset.defaultCharset());
+      assertEquals("Contents saved to S3 file did not match expected", "hello", read);
+    }
   }
 
   /** Write a small byte array, which is smaller than S3 part size. */
   @Test
   public void testWriteSmallBuffer() throws IOException {
-
     // must be smaller than S3 part size
     byte[] buffer = "hello".getBytes(Charset.defaultCharset());
     // pre-check -- ensure that our test string isn't too big
     assertTrue(buffer.length < S3OutputStream.PART_SIZE);
 
-    S3OutputStream output = new S3OutputStream(s3, "small-buffer", BUCKET);
-    output.write(buffer);
-    output.close();
+    try (S3OutputStream output = new S3OutputStream(s3, "small-buffer", BUCKET)) {
+      output.write(buffer);
+    }
 
     // Check we can re-read same content
-    InputStream input = s3.getObject(BUCKET, "small-buffer").getObjectContent();
-    String read = IOUtils.toString(input, Charset.defaultCharset());
-    assertEquals("hello", read);
+    try (InputStream input = s3.getObject(b -> b.bucket(BUCKET).key("small-buffer"))) {
+      String read = IOUtils.toString(input, Charset.defaultCharset());
+      assertEquals("hello", read);
+    }
   }
 
   /** Write a byte array larger than S3 part size. Simulate a real multi-part upload. */
   @Test
   public void testWriteLargeBuffer() throws IOException {
-
     // must be larger than S3 part size
     String content = RandomStringUtils.randomAlphanumeric(S3OutputStream.PART_SIZE + 1024);
     byte[] buffer = content.getBytes(Charset.defaultCharset());
     // pre-check -- ensure that our test string isn't too small
     assertTrue(buffer.length > S3OutputStream.PART_SIZE);
 
-    S3OutputStream output = new S3OutputStream(s3, "large-buffer", BUCKET);
-    output.write(buffer);
-    output.close();
+    try (S3OutputStream output = new S3OutputStream(s3, "large-buffer", BUCKET)) {
+      output.write(buffer);
+    }
 
     // Check we can re-read same content
-    InputStream input = s3.getObject(BUCKET, "large-buffer").getObjectContent();
-    String read = IOUtils.toString(input, Charset.defaultCharset());
-    assertEquals(new String(buffer, Charset.defaultCharset()), read);
+    try (InputStream input = s3.getObject(b -> b.bucket(BUCKET).key("large-buffer"))) {
+      String read = IOUtils.toString(input, Charset.defaultCharset());
+      assertEquals(new String(buffer, Charset.defaultCharset()), read);
+    }
   }
 
   /** Check flush is a no-op if data size is lower than required size of S3 part. */
@@ -120,21 +120,22 @@ public class S3OutputStreamTest extends SolrTestCaseJ4 {
     byte[] buffer = "hello".getBytes(Charset.defaultCharset());
     assertTrue(buffer.length < S3OutputStream.PART_SIZE);
 
-    S3OutputStream output = new S3OutputStream(s3, "flush-small", BUCKET);
-    output.write(buffer);
-    output.flush();
+    try (S3OutputStream output = new S3OutputStream(s3, "flush-small", BUCKET)) {
+      output.write(buffer);
+      output.flush();
 
-    buffer = ", world!".getBytes(Charset.defaultCharset());
-    output.write(buffer);
-    output.close();
+      buffer = ", world!".getBytes(Charset.defaultCharset());
+      output.write(buffer);
+    }
 
     // Check we can re-read same content
-    InputStream input = s3.getObject(BUCKET, "flush-small").getObjectContent();
-    String read = IOUtils.toString(input, Charset.defaultCharset());
-    assertEquals(
-        "Flushing a small frame of an S3OutputStream should not impact data written",
-        "hello, world!",
-        read);
+    try (InputStream input = s3.getObject(b -> b.bucket(BUCKET).key("flush-small"))) {
+      String read = IOUtils.toString(input, Charset.defaultCharset());
+      assertEquals(
+          "Flushing a small frame of an S3OutputStream should not impact data written",
+          "hello, world!",
+          read);
+    }
   }
 
   /** Check flush is happening when data in buffer is larger than S3 minimal part size. */
@@ -145,20 +146,21 @@ public class S3OutputStreamTest extends SolrTestCaseJ4 {
     byte[] buffer = content.getBytes(Charset.defaultCharset());
     assertTrue(buffer.length > S3OutputStream.MIN_PART_SIZE);
 
-    S3OutputStream output = new S3OutputStream(s3, "flush-large", BUCKET);
-    output.write(buffer);
-    output.flush();
+    try (S3OutputStream output = new S3OutputStream(s3, "flush-large", BUCKET)) {
+      output.write(buffer);
+      output.flush();
 
-    buffer = "some more".getBytes(Charset.defaultCharset());
-    output.write(buffer);
-    output.close();
+      buffer = "some more".getBytes(Charset.defaultCharset());
+      output.write(buffer);
+    }
 
     // Check we can re-read same content
-    InputStream input = s3.getObject(BUCKET, "flush-large").getObjectContent();
-    String read = IOUtils.toString(input, Charset.defaultCharset());
-    assertEquals(
-        "Flushing a large frame of an S3OutputStream should not impact data written",
-        content + "some more",
-        read);
+    try (InputStream input = s3.getObject(b -> b.bucket(BUCKET).key("flush-large"))) {
+      String read = IOUtils.toString(input, Charset.defaultCharset());
+      assertEquals(
+          "Flushing a large frame of an S3OutputStream should not impact data written",
+          content + "some more",
+          read);
+    }
   }
 }
diff --git a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3PathsTest.java b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3PathsTest.java
index efba913..127aa9c 100644
--- a/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3PathsTest.java
+++ b/solr/contrib/s3-repository/src/test/org/apache/solr/s3/S3PathsTest.java
@@ -70,7 +70,7 @@ public class S3PathsTest extends AbstractS3ClientTest {
 
     client.deleteDirectory("/delete-dir");
 
-    assertFalse("dir should no longer exist after deletion", client.pathExists("/delete-dir"));
+    assertFalse("dir should no longer exist after deletion", client.pathExists("/delete-dir/"));
     assertFalse(
         "files in dir should be recursively deleted", client.pathExists("/delete-dir/file1"));
     assertFalse(
@@ -93,11 +93,11 @@ public class S3PathsTest extends AbstractS3ClientTest {
     client.deleteDirectory("/delete-dir");
 
     // All files and subdirs in /delete-dir should no longer exist
-    assertFalse(client.pathExists("/delete-dir"));
+    assertFalse(client.pathExists("/delete-dir/"));
     assertFalse(client.pathExists("/delete-dir/file1"));
-    assertFalse(client.pathExists("/delete-dir/sub-dir1"));
+    assertFalse(client.pathExists("/delete-dir/sub-dir1/"));
     assertFalse(client.pathExists("/delete-dir/sub-dir1/file2"));
-    assertFalse(client.pathExists("/delete-dir/sub-dir1/sub-dir2"));
+    assertFalse(client.pathExists("/delete-dir/sub-dir1/sub-dir2/"));
     assertFalse(client.pathExists("/delete-dir/sub-dir1/sub-dir2/file3"));
   }
 
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index fd9f8db..06620b4 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -341,7 +341,8 @@ public class NodeConfig {
         "zkDigestPassword",
         "zkDigestReadonlyPassword",
         "aws.secretKey", // AWS SDK v1
-        "aws.secretAccessKey" // AWS SDK v2
+        "aws.secretAccessKey", // AWS SDK v2
+        "http.proxyPassword"
     ));
 
     public NodeConfigBuilder(String nodeName, Path solrHome) {
diff --git a/solr/licenses/jaxb-api-2.3.1.jar.sha1 b/solr/licenses/jaxb-api-2.3.1.jar.sha1
deleted file mode 100644
index 2f6376f..0000000
--- a/solr/licenses/jaxb-api-2.3.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8531ad5ac454cc2deb9d4d32c40c4d7451939b5d
diff --git a/solr/licenses/jaxb-api-LICENSE-CDDL.txt b/solr/licenses/jaxb-api-LICENSE-CDDL.txt
deleted file mode 100644
index 4a00ba9..0000000
--- a/solr/licenses/jaxb-api-LICENSE-CDDL.txt
+++ /dev/null
@@ -1,362 +0,0 @@
-COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.1
-
-1. Definitions.
-
-    1.1. "Contributor" means each individual or entity that creates or
-    contributes to the creation of Modifications.
-
-    1.2. "Contributor Version" means the combination of the Original
-    Software, prior Modifications used by a Contributor (if any), and
-    the Modifications made by that particular Contributor.
-
-    1.3. "Covered Software" means (a) the Original Software, or (b)
-    Modifications, or (c) the combination of files containing Original
-    Software with files containing Modifications, in each case including
-    portions thereof.
-
-    1.4. "Executable" means the Covered Software in any form other than
-    Source Code.
-
-    1.5. "Initial Developer" means the individual or entity that first
-    makes Original Software available under this License.
-
-    1.6. "Larger Work" means a work which combines Covered Software or
-    portions thereof with code not governed by the terms of this License.
-
-    1.7. "License" means this document.
-
-    1.8. "Licensable" means having the right to grant, to the maximum
-    extent possible, whether at the time of the initial grant or
-    subsequently acquired, any and all of the rights conveyed herein.
-
-    1.9. "Modifications" means the Source Code and Executable form of
-    any of the following:
-
-    A. Any file that results from an addition to, deletion from or
-    modification of the contents of a file containing Original Software
-    or previous Modifications;
-
-    B. Any new file that contains any part of the Original Software or
-    previous Modification; or
-
-    C. Any new file that is contributed or otherwise made available
-    under the terms of this License.
-
-    1.10. "Original Software" means the Source Code and Executable form
-    of computer software code that is originally released under this
-    License.
-
-    1.11. "Patent Claims" means any patent claim(s), now owned or
-    hereafter acquired, including without limitation, method, process,
-    and apparatus claims, in any patent Licensable by grantor.
-
-    1.12. "Source Code" means (a) the common form of computer software
-    code in which modifications are made and (b) associated
-    documentation included in or with such code.
-
-    1.13. "You" (or "Your") means an individual or a legal entity
-    exercising rights under, and complying with all of the terms of,
-    this License. For legal entities, "You" includes any entity which
-    controls, is controlled by, or is under common control with You. For
-    purposes of this definition, "control" means (a) the power, direct
-    or indirect, to cause the direction or management of such entity,
-    whether by contract or otherwise, or (b) ownership of more than
-    fifty percent (50%) of the outstanding shares or beneficial
-    ownership of such entity.
-
-2. License Grants.
-
-    2.1. The Initial Developer Grant.
-
-    Conditioned upon Your compliance with Section 3.1 below and subject
-    to third party intellectual property claims, the Initial Developer
-    hereby grants You a world-wide, royalty-free, non-exclusive license:
-
-    (a) under intellectual property rights (other than patent or
-    trademark) Licensable by Initial Developer, to use, reproduce,
-    modify, display, perform, sublicense and distribute the Original
-    Software (or portions thereof), with or without Modifications,
-    and/or as part of a Larger Work; and
-
-    (b) under Patent Claims infringed by the making, using or selling of
-    Original Software, to make, have made, use, practice, sell, and
-    offer for sale, and/or otherwise dispose of the Original Software
-    (or portions thereof).
-
-    (c) The licenses granted in Sections 2.1(a) and (b) are effective on
-    the date Initial Developer first distributes or otherwise makes the
-    Original Software available to a third party under the terms of this
-    License.
-
-    (d) Notwithstanding Section 2.1(b) above, no patent license is
-    granted: (1) for code that You delete from the Original Software, or
-    (2) for infringements caused by: (i) the modification of the
-    Original Software, or (ii) the combination of the Original Software
-    with other software or devices.
-
-    2.2. Contributor Grant.
-
-    Conditioned upon Your compliance with Section 3.1 below and subject
-    to third party intellectual property claims, each Contributor hereby
-    grants You a world-wide, royalty-free, non-exclusive license:
-
-    (a) under intellectual property rights (other than patent or
-    trademark) Licensable by Contributor to use, reproduce, modify,
-    display, perform, sublicense and distribute the Modifications
-    created by such Contributor (or portions thereof), either on an
-    unmodified basis, with other Modifications, as Covered Software
-    and/or as part of a Larger Work; and
-
-    (b) under Patent Claims infringed by the making, using, or selling
-    of Modifications made by that Contributor either alone and/or in
-    combination with its Contributor Version (or portions of such
-    combination), to make, use, sell, offer for sale, have made, and/or
-    otherwise dispose of: (1) Modifications made by that Contributor (or
-    portions thereof); and (2) the combination of Modifications made by
-    that Contributor with its Contributor Version (or portions of such
-    combination).
-
-    (c) The licenses granted in Sections 2.2(a) and 2.2(b) are effective
-    on the date Contributor first distributes or otherwise makes the
-    Modifications available to a third party.
-
-    (d) Notwithstanding Section 2.2(b) above, no patent license is
-    granted: (1) for any code that Contributor has deleted from the
-    Contributor Version; (2) for infringements caused by: (i) third
-    party modifications of Contributor Version, or (ii) the combination
-    of Modifications made by that Contributor with other software
-    (except as part of the Contributor Version) or other devices; or (3)
-    under Patent Claims infringed by Covered Software in the absence of
-    Modifications made by that Contributor.
-
-3. Distribution Obligations.
-
-    3.1. Availability of Source Code.
-
-    Any Covered Software that You distribute or otherwise make available
-    in Executable form must also be made available in Source Code form
-    and that Source Code form must be distributed only under the terms
-    of this License. You must include a copy of this License with every
-    copy of the Source Code form of the Covered Software You distribute
-    or otherwise make available. You must inform recipients of any such
-    Covered Software in Executable form as to how they can obtain such
-    Covered Software in Source Code form in a reasonable manner on or
-    through a medium customarily used for software exchange.
-
-    3.2. Modifications.
-
-    The Modifications that You create or to which You contribute are
-    governed by the terms of this License. You represent that You
-    believe Your Modifications are Your original creation(s) and/or You
-    have sufficient rights to grant the rights conveyed by this License.
-
-    3.3. Required Notices.
-
-    You must include a notice in each of Your Modifications that
-    identifies You as the Contributor of the Modification. You may not
-    remove or alter any copyright, patent or trademark notices contained
-    within the Covered Software, or any notices of licensing or any
-    descriptive text giving attribution to any Contributor or the
-    Initial Developer.
-
-    3.4. Application of Additional Terms.
-
-    You may not offer or impose any terms on any Covered Software in
-    Source Code form that alters or restricts the applicable version of
-    this License or the recipients' rights hereunder. You may choose to
-    offer, and to charge a fee for, warranty, support, indemnity or
-    liability obligations to one or more recipients of Covered Software.
-    However, you may do so only on Your own behalf, and not on behalf of
-    the Initial Developer or any Contributor. You must make it
-    absolutely clear that any such warranty, support, indemnity or
-    liability obligation is offered by You alone, and You hereby agree
-    to indemnify the Initial Developer and every Contributor for any
-    liability incurred by the Initial Developer or such Contributor as a
-    result of warranty, support, indemnity or liability terms You offer.
-
-    3.5. Distribution of Executable Versions.
-
-    You may distribute the Executable form of the Covered Software under
-    the terms of this License or under the terms of a license of Your
-    choice, which may contain terms different from this License,
-    provided that You are in compliance with the terms of this License
-    and that the license for the Executable form does not attempt to
-    limit or alter the recipient's rights in the Source Code form from
-    the rights set forth in this License. If You distribute the Covered
-    Software in Executable form under a different license, You must make
-    it absolutely clear that any terms which differ from this License
-    are offered by You alone, not by the Initial Developer or
-    Contributor. You hereby agree to indemnify the Initial Developer and
-    every Contributor for any liability incurred by the Initial
-    Developer or such Contributor as a result of any such terms You offer.
-
-    3.6. Larger Works.
-
-    You may create a Larger Work by combining Covered Software with
-    other code not governed by the terms of this License and distribute
-    the Larger Work as a single product. In such a case, You must make
-    sure the requirements of this License are fulfilled for the Covered
-    Software.
-
-4. Versions of the License.
-
-    4.1. New Versions.
-
-    Oracle is the initial license steward and may publish revised and/or
-    new versions of this License from time to time. Each version will be
-    given a distinguishing version number. Except as provided in Section
-    4.3, no one other than the license steward has the right to modify
-    this License.
-
-    4.2. Effect of New Versions.
-
-    You may always continue to use, distribute or otherwise make the
-    Covered Software available under the terms of the version of the
-    License under which You originally received the Covered Software. If
-    the Initial Developer includes a notice in the Original Software
-    prohibiting it from being distributed or otherwise made available
-    under any subsequent version of the License, You must distribute and
-    make the Covered Software available under the terms of the version
-    of the License under which You originally received the Covered
-    Software. Otherwise, You may also choose to use, distribute or
-    otherwise make the Covered Software available under the terms of any
-    subsequent version of the License published by the license steward.
-
-    4.3. Modified Versions.
-
-    When You are an Initial Developer and You want to create a new
-    license for Your Original Software, You may create and use a
-    modified version of this License if You: (a) rename the license and
-    remove any references to the name of the license steward (except to
-    note that the license differs from this License); and (b) otherwise
-    make it clear that the license contains terms which differ from this
-    License.
-
-5. DISCLAIMER OF WARRANTY.
-
-    COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS,
-    WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED,
-    INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED SOFTWARE
-    IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE OR
-    NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF
-    THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY COVERED SOFTWARE PROVE
-    DEFECTIVE IN ANY RESPECT, YOU (NOT THE INITIAL DEVELOPER OR ANY
-    OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY SERVICING,
-    REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN
-    ESSENTIAL PART OF THIS LICENSE. NO USE OF ANY COVERED SOFTWARE IS
-    AUTHORIZED HEREUNDER EXCEPT UNDER THIS DISCLAIMER.
-
-6. TERMINATION.
-
-    6.1. This License and the rights granted hereunder will terminate
-    automatically if You fail to comply with terms herein and fail to
-    cure such breach within 30 days of becoming aware of the breach.
-    Provisions which, by their nature, must remain in effect beyond the
-    termination of this License shall survive.
-
-    6.2. If You assert a patent infringement claim (excluding
-    declaratory judgment actions) against Initial Developer or a
-    Contributor (the Initial Developer or Contributor against whom You
-    assert such claim is referred to as "Participant") alleging that the
-    Participant Software (meaning the Contributor Version where the
-    Participant is a Contributor or the Original Software where the
-    Participant is the Initial Developer) directly or indirectly
-    infringes any patent, then any and all rights granted directly or
-    indirectly to You by such Participant, the Initial Developer (if the
-    Initial Developer is not the Participant) and all Contributors under
-    Sections 2.1 and/or 2.2 of this License shall, upon 60 days notice
-    from Participant terminate prospectively and automatically at the
-    expiration of such 60 day notice period, unless if within such 60
-    day period You withdraw Your claim with respect to the Participant
-    Software against such Participant either unilaterally or pursuant to
-    a written agreement with Participant.
-
-    6.3. If You assert a patent infringement claim against Participant
-    alleging that the Participant Software directly or indirectly
-    infringes any patent where such claim is resolved (such as by
-    license or settlement) prior to the initiation of patent
-    infringement litigation, then the reasonable value of the licenses
-    granted by such Participant under Sections 2.1 or 2.2 shall be taken
-    into account in determining the amount or value of any payment or
-    license.
-
-    6.4. In the event of termination under Sections 6.1 or 6.2 above,
-    all end user licenses that have been validly granted by You or any
-    distributor hereunder prior to termination (excluding licenses
-    granted to You by any distributor) shall survive termination.
-
-7. LIMITATION OF LIABILITY.
-
-    UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
-    (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE
-    INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF
-    COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE LIABLE
-    TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR
-    CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT
-    LIMITATION, DAMAGES FOR LOSS OF GOODWILL, WORK STOPPAGE, COMPUTER
-    FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER COMMERCIAL DAMAGES OR
-    LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN INFORMED OF THE
-    POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF LIABILITY SHALL NOT
-    APPLY TO LIABILITY FOR DEATH OR PERSONAL INJURY RESULTING FROM SUCH
-    PARTY'S NEGLIGENCE TO THE EXTENT APPLICABLE LAW PROHIBITS SUCH
-    LIMITATION. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OR
-    LIMITATION OF INCIDENTAL OR CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION
-    AND LIMITATION MAY NOT APPLY TO YOU.
-
-8. U.S. GOVERNMENT END USERS.
-
-    The Covered Software is a "commercial item," as that term is defined
-    in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial computer
-    software" (as that term is defined at 48 C.F.R. ยง
-    252.227-7014(a)(1)) and "commercial computer software documentation"
-    as such terms are used in 48 C.F.R. 12.212 (Sept. 1995). Consistent
-    with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4
-    (June 1995), all U.S. Government End Users acquire Covered Software
-    with only those rights set forth herein. This U.S. Government Rights
-    clause is in lieu of, and supersedes, any other FAR, DFAR, or other
-    clause or provision that addresses Government rights in computer
-    software under this License.
-
-9. MISCELLANEOUS.
-
-    This License represents the complete agreement concerning subject
-    matter hereof. If any provision of this License is held to be
-    unenforceable, such provision shall be reformed only to the extent
-    necessary to make it enforceable. This License shall be governed by
-    the law of the jurisdiction specified in a notice contained within
-    the Original Software (except to the extent applicable law, if any,
-    provides otherwise), excluding such jurisdiction's conflict-of-law
-    provisions. Any litigation relating to this License shall be subject
-    to the jurisdiction of the courts located in the jurisdiction and
-    venue specified in a notice contained within the Original Software,
-    with the losing party responsible for costs, including, without
-    limitation, court costs and reasonable attorneys' fees and expenses.
-    The application of the United Nations Convention on Contracts for
-    the International Sale of Goods is expressly excluded. Any law or
-    regulation which provides that the language of a contract shall be
-    construed against the drafter shall not apply to this License. You
-    agree that You alone are responsible for compliance with the United
-    States export administration regulations (and the export control
-    laws and regulation of any other countries) when You use, distribute
-    or otherwise make available any Covered Software.
-
-10. RESPONSIBILITY FOR CLAIMS.
-
-    As between Initial Developer and the Contributors, each party is
-    responsible for claims and damages arising, directly or indirectly,
-    out of its utilization of rights under this License and You agree to
-    work with Initial Developer and Contributors to distribute such
-    responsibility on an equitable basis. Nothing herein is intended or
-    shall be deemed to constitute any admission of liability.
-
-------------------------------------------------------------------------
-
-NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND DISTRIBUTION
-LICENSE (CDDL)
-
-The code released under the CDDL shall be governed by the laws of the
-State of California (excluding conflict-of-law provisions). Any
-litigation relating to this License shall be subject to the jurisdiction
-of the Federal Courts of the Northern District of California and the
-state courts of the State of California, with venue lying in Santa Clara
-County, California.
diff --git a/solr/licenses/jaxb-api-NOTICE.txt b/solr/licenses/jaxb-api-NOTICE.txt
deleted file mode 100644
index 8b13789..0000000
--- a/solr/licenses/jaxb-api-NOTICE.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/solr/licenses/netty-nio-client-2.16.93.jar.sha1 b/solr/licenses/netty-nio-client-2.16.93.jar.sha1
deleted file mode 100644
index cb925fe..0000000
--- a/solr/licenses/netty-nio-client-2.16.93.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4f7e2d63236c77e0ce93e2ffa8c477acb68d0193
diff --git a/solr/licenses/netty-nio-client-LICENSE-ASL.txt b/solr/licenses/netty-nio-client-LICENSE-ASL.txt
deleted file mode 100644
index 1eef70a..0000000
--- a/solr/licenses/netty-nio-client-LICENSE-ASL.txt
+++ /dev/null
@@ -1,206 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-   Note: Other license terms may apply to certain, identified software files contained within or distributed
-   with the accompanying software if such terms are included in the directory containing the accompanying software.
-   Such other license terms will then apply in lieu of the terms of the software license above.
diff --git a/solr/licenses/netty-nio-client-NOTICE.txt b/solr/licenses/netty-nio-client-NOTICE.txt
deleted file mode 100644
index 7b5a068..0000000
--- a/solr/licenses/netty-nio-client-NOTICE.txt
+++ /dev/null
@@ -1,25 +0,0 @@
-AWS SDK for Java 2.0
-Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-
-This product includes software developed by
-Amazon Technologies, Inc (http://www.amazon.com/).
-
-**********************
-THIRD PARTY COMPONENTS
-**********************
-This software includes third party software subject to the following copyrights:
-- XML parsing and utility functions from JetS3t - Copyright 2006-2009 James Murty.
-- PKCS#1 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc.
-- Apache Commons Lang - https://github.com/apache/commons-lang
-- Netty Reactive Streams - https://github.com/playframework/netty-reactive-streams
-- Jackson-core - https://github.com/FasterXML/jackson-core
-- Jackson-dataformat-cbor - https://github.com/FasterXML/jackson-dataformats-binary
-
-The licenses for these third party components are included in LICENSE.txt
-
-- For Apache Commons Lang see also this required NOTICE:
-  Apache Commons Lang
-  Copyright 2001-2020 The Apache Software Foundation
-  
-  This product includes software developed at
-  The Apache Software Foundation (https://www.apache.org/).
diff --git a/solr/licenses/netty-reactive-streams-2.0.5.jar.sha1 b/solr/licenses/netty-reactive-streams-2.0.5.jar.sha1
deleted file mode 100644
index 2dd6a05..0000000
--- a/solr/licenses/netty-reactive-streams-2.0.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4e829ba519e1e63cb00567c82aca7d48312b32cb
diff --git a/solr/licenses/netty-reactive-streams-LICENSE-ASL.txt b/solr/licenses/netty-reactive-streams-LICENSE-ASL.txt
deleted file mode 100644
index d645695..0000000
--- a/solr/licenses/netty-reactive-streams-LICENSE-ASL.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/netty-reactive-streams-NOTICE.txt b/solr/licenses/netty-reactive-streams-NOTICE.txt
deleted file mode 100644
index 8b13789..0000000
--- a/solr/licenses/netty-reactive-streams-NOTICE.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/solr/licenses/netty-reactive-streams-http-2.0.5.jar.sha1 b/solr/licenses/netty-reactive-streams-http-2.0.5.jar.sha1
deleted file mode 100644
index 5751260..0000000
--- a/solr/licenses/netty-reactive-streams-http-2.0.5.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5148eda52181bf504eaf61015f6cf0f39ea22136
diff --git a/solr/licenses/netty-reactive-streams-http-LICENSE-ASL.txt b/solr/licenses/netty-reactive-streams-http-LICENSE-ASL.txt
deleted file mode 100644
index d645695..0000000
--- a/solr/licenses/netty-reactive-streams-http-LICENSE-ASL.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/netty-reactive-streams-http-NOTICE.txt b/solr/licenses/netty-reactive-streams-http-NOTICE.txt
deleted file mode 100644
index 8b13789..0000000
--- a/solr/licenses/netty-reactive-streams-http-NOTICE.txt
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/solr/licenses/reactive-streams-1.0.3.jar.sha1 b/solr/licenses/reactive-streams-1.0.3.jar.sha1
new file mode 100644
index 0000000..b0ab77d
--- /dev/null
+++ b/solr/licenses/reactive-streams-1.0.3.jar.sha1
@@ -0,0 +1 @@
+d9fb7a7926ffa635b3dcaa5049fb2bfa25b3e7d0
diff --git a/solr/licenses/reactive-streams-LICENSE-PD.txt b/solr/licenses/reactive-streams-LICENSE-PD.txt
new file mode 100644
index 0000000..696f2c0
--- /dev/null
+++ b/solr/licenses/reactive-streams-LICENSE-PD.txt
@@ -0,0 +1,8 @@
+Licensed under Public Domain (CC0)
+
+To the extent possible under law, the person who associated CC0 with
+this code has waived all copyright and related or neighboring
+rights to this code.
+
+You should have received a copy of the CC0 legalcode along with this
+work. If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
diff --git a/solr/licenses/reactive-streams-NOTICE.txt b/solr/licenses/reactive-streams-NOTICE.txt
new file mode 100644
index 0000000..e69de29
diff --git a/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1 b/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1
deleted file mode 100644
index 4432f29..0000000
--- a/solr/licenses/woodstox-core-asl-4.4.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-84fee5eb1a4a1cefe65b6883c73b3fa83be3c1a1
diff --git a/solr/server/etc/security.policy b/solr/server/etc/security.policy
index 04952ac..7716da0 100644
--- a/solr/server/etc/security.policy
+++ b/solr/server/etc/security.policy
@@ -103,7 +103,8 @@ grant {
   permission java.lang.RuntimePermission "writeFileDescriptor";
   // needed by hadoop http
   permission java.lang.RuntimePermission "getProtectionDomain";
-  // needed by aws s3 sdk
+  // needed by aws s3 sdk (Apache HTTP Client)
+  permission java.lang.RuntimePermission "setFactory";
   permission java.lang.RuntimePermission "accessClassInPackage.jdk.internal.reflect";
 
   // These two *have* to be spelled out a separate
@@ -200,6 +201,11 @@ grant {
 
   permission java.io.FilePermission "${log4j.configurationFile}", "read,write,delete,readlink";
 
+  // Credentials for S3 Repository
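+  // aws.sharedCredentialsFile and aws.configFile are optional system properties that
+  // override the default ~/.aws/credentials and ~/.aws/config locations read by the SDK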
+  permission java.io.FilePermission "${aws.sharedCredentialsFile}", "read,readlink";
+  permission java.io.FilePermission "${aws.configFile}", "read,readlink";
+  permission java.io.FilePermission "${user.home}${/}.aws${/}-", "read,readlink";
+
   // expanded to a wildcard if set, allows all networking everywhere
   permission java.net.SocketPermission "${solr.internal.network.permission}", "accept,listen,connect,resolve";
 };
diff --git a/solr/solr-ref-guide/src/making-and-restoring-backups.adoc b/solr/solr-ref-guide/src/making-and-restoring-backups.adoc
index cd3b1af..4a6bb41 100644
--- a/solr/solr-ref-guide/src/making-and-restoring-backups.adoc
+++ b/solr/solr-ref-guide/src/making-and-restoring-backups.adoc
@@ -421,9 +421,10 @@ An example configuration using the overall and GCS-client properties can be seen
 
 === S3BackupRepository
 
-Stores and retrieves backup files in an Amazon S3 bucket. This plugin must first be <<solr-plugins.adoc#installing-plugins,installed>> before using.
+Stores and retrieves backup files in an Amazon S3 bucket.
+This plugin must first be <<solr-plugins.adoc#installing-plugins,installed>> before it can be used.
 
-This plugin uses the https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html[default AWS credentials provider chain], so ensure that your credentials are set appropriately (e.g., via env var, or in `~/.aws/credentials`, etc.).
+This plugin uses the https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/credentials.html[default AWS credentials provider chain], so ensure that your credentials are set appropriately (e.g., via env var, or in `~/.aws/credentials`, etc.).
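+For example, when running Solr from the command line, credentials can be supplied through the standard AWS environment variables (the values below are placeholders):
+
+[source,bash]
+----
+# Export credentials for the default provider chain, then start Solr in cloud mode
+export AWS_ACCESS_KEY_ID="AKIA-EXAMPLE"
+export AWS_SECRET_ACCESS_KEY="example-secret-key"
+bin/solr start -c
+----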
 
 [NOTE]
 ====
@@ -474,27 +475,35 @@ Minio is an example of an _s3-compatible_ endpoint that does not work with the S
 The S3BackupRepository is only guaranteed to be compatible with AWS S3 and S3Mock.
 ====
 
-`s3.proxy.host`::
+`s3.proxy.url`::
 +
 [%autowidth,frame=none]
 |===
 |Optional |Default: none
 |===
 +
-Proxy hostname for the S3 client to route requests through, if desired.
-Should be used in conjunction with `s3.proxy.port` parameter.
-Can be overridden by setting `S3_PROXY_HOST` environment variable.
+Proxy URL for the S3 client to route requests through, if desired; a proxy-specific configuration sketch is shown after this list of options.
+The URL should include `<scheme>://<hostname>:<port>`; however, the port and scheme _may_ be inferred if missing.
++
+If set, this overrides any system proxy settings.
+There is no need to disable the `s3.proxy.useSystemSettings` option.
+If you need to set a proxy `username`, `password`, or `nonProxyHosts`, use the system properties listed below.
 
-`s3.proxy.port`::
+`s3.proxy.useSystemSettings`::
 +
 [%autowidth,frame=none]
 |===
-|Optional |Default: none
+|Optional |Default: true
 |===
 +
-Proxy port number for the S3 client to route requests through, if desired.
-Should be used in conjunction with `s3.proxy.host` parameter.
-Can be overridden by setting `S3_PROXY_PORT` environment variable.
+By default, the system proxy settings are used when communicating with the S3 server, if they are set.
+The supported proxy system properties are:
++
+* `http.proxyHost`
+* `http.proxyPort`
+* `http.nonProxyHosts`
+* `http.proxyUser`
+* `http.proxyPassword`
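+
+A minimal repository definition in `solr.xml` that routes requests through an explicit proxy might look like the following sketch (the bucket, region, and proxy values are illustrative placeholders):
+
+[source,xml]
+----
+<backup>
+  <repository name="s3" class="org.apache.solr.s3.S3BackupRepository" default="false">
+    <str name="s3.bucket.name">my-backup-bucket</str>
+    <str name="s3.region">us-east-1</str>
+    <str name="s3.proxy.url">http://proxy.example.com:8080</str>
+  </repository>
+</backup>
+----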
 
 An example configuration to enable S3 backups and restore can be seen below:
 
diff --git a/solr/solrj/ivy.xml b/solr/solrj/ivy.xml
index 5ce77aa..e93d2aa 100644
--- a/solr/solrj/ivy.xml
+++ b/solr/solrj/ivy.xml
@@ -39,7 +39,7 @@
     <dependency org="org.apache.httpcomponents" name="httpcore" rev="${/org.apache.httpcomponents/httpcore}" conf="compile"/>
     <dependency org="commons-io" name="commons-io" rev="${/commons-io/commons-io}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-math3" rev="${/org.apache.commons/commons-math3}" conf="compile"/>
-    <dependency org="org.codehaus.woodstox" name="woodstox-core-asl" rev="${/org.codehaus.woodstox/woodstox-core-asl}" conf="compile"/>
+    <dependency org="com.fasterxml.woodstox" name="woodstox-core" rev="${/com.fasterxml.woodstox/woodstox-core}" conf="compile"/>
     <dependency org="org.codehaus.woodstox" name="stax2-api" rev="${/org.codehaus.woodstox/stax2-api}" conf="compile"/>
     <dependency org="org.slf4j" name="slf4j-api" rev="${/org.slf4j/slf4j-api}" conf="compile"/>
     <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="compile"/>