You are viewing a plain-text rendering of this message; the canonical HTML version is available in the Apache mailing-list archives (the original hyperlink was lost in the plain-text conversion).
Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2022/05/20 18:17:42 UTC
[hadoop] branch branch-3.3 updated: HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)
This is an automated email from the ASF dual-hosted git repository.
aajisaka pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/branch-3.3 by this push:
new 57fe613299f HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)
57fe613299f is described below
commit 57fe613299f7e254c19985e8075f7494616ea9c8
Author: Ashutosh Gupta <as...@st.niituniversity.in>
AuthorDate: Fri May 20 18:53:14 2022 +0100
HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)
Co-authored-by: Ashutosh Gupta <as...@amazon.com>
Signed-off-by: Akira Ajisaka <aa...@apache.org>
(cherry picked from commit fb910bd9068f12f89136c2776ca6a45caac0408f)
Conflicts:
hadoop-project/pom.xml
---
LICENSE-binary | 2 +-
hadoop-client-modules/hadoop-client/pom.xml | 12 ++++
.../dev-support/findbugsExcludeFile.xml | 13 ++++
hadoop-hdfs-project/hadoop-hdfs-client/pom.xml | 10 +++-
.../ConfRefreshTokenBasedAccessTokenProvider.java | 66 ++++++++++-----------
.../oauth2/CredentialBasedAccessTokenProvider.java | 69 +++++++++++-----------
.../hadoop/hdfs/web/oauth2/OAuth2Constants.java | 2 +-
hadoop-project/pom.xml | 36 +++++++++--
hadoop-tools/hadoop-azure-datalake/pom.xml | 13 ----
9 files changed, 135 insertions(+), 88 deletions(-)
diff --git a/LICENSE-binary b/LICENSE-binary
index bacad8b6b4f..03c42f4deca 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -243,7 +243,7 @@ com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
com.google.j2objc:j2objc-annotations:1.3
com.microsoft.azure:azure-storage:7.0.1
com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp:okhttp:2.7.5
+com.squareup.okhttp3:okhttp:4.9.3
com.squareup.okio:okio:1.6.0
com.yammer.metrics:metrics-core:2.2.0
com.zaxxer:HikariCP-java7:2.4.12
diff --git a/hadoop-client-modules/hadoop-client/pom.xml b/hadoop-client-modules/hadoop-client/pom.xml
index 7dda07b5a2f..b2b58489531 100644
--- a/hadoop-client-modules/hadoop-client/pom.xml
+++ b/hadoop-client-modules/hadoop-client/pom.xml
@@ -114,6 +114,18 @@
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib-common</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.squareup.okhttp3</groupId>
+ <artifactId>okhttp</artifactId>
+ </exclusion>
<exclusion>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
index 278d01dc22d..1cefa55baa1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
@@ -93,4 +93,17 @@
<Bug pattern="EI_EXPOSE_REP" />
</Match>
+ <!--okhttp classes from Kotlin are not analysed for NP check. -->
+ <Match>
+ <Class name="org.apache.hadoop.hdfs.web.oauth2.ConfRefreshTokenBasedAccessTokenProvider" />
+ <Method name="refresh" />
+ <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+ </Match>
+
+ <Match>
+ <Class name="org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider" />
+ <Method name="refresh" />
+ <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+ </Match>
+
</FindBugsFilter>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
index f0a3ae993fd..566bdd0d8b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
@@ -35,9 +35,17 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependencies>
<dependency>
- <groupId>com.squareup.okhttp</groupId>
+ <groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib-common</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
index 3e3fbfbd913..e944e8c1c8d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@ import org.apache.hadoop.util.JsonSerialization;
import org.apache.hadoop.util.Timer;
import org.apache.http.HttpStatus;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -102,37 +103,34 @@ public class ConfRefreshTokenBasedAccessTokenProvider
}
void refresh() throws IOException {
- try {
- OkHttpClient client = new OkHttpClient();
- client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
- client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
-
- String bodyString = Utils.postBody(GRANT_TYPE, REFRESH_TOKEN,
- REFRESH_TOKEN, refreshToken,
- CLIENT_ID, clientId);
-
- RequestBody body = RequestBody.create(URLENCODED, bodyString);
-
- Request request = new Request.Builder()
- .url(refreshURL)
- .post(body)
- .build();
- Response responseBody = client.newCall(request).execute();
-
- if (responseBody.code() != HttpStatus.SC_OK) {
- throw new IllegalArgumentException("Received invalid http response: "
- + responseBody.code() + ", text = " + responseBody.toString());
+ OkHttpClient client =
+ new OkHttpClient.Builder().connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+ TimeUnit.MILLISECONDS)
+ .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .build();
+
+ String bodyString =
+ Utils.postBody(GRANT_TYPE, REFRESH_TOKEN, REFRESH_TOKEN, refreshToken, CLIENT_ID, clientId);
+
+ RequestBody body = RequestBody.create(bodyString, URLENCODED);
+
+ Request request = new Request.Builder().url(refreshURL).post(body).build();
+ try (Response response = client.newCall(request).execute()) {
+ if (!response.isSuccessful()) {
+ throw new IOException("Unexpected code " + response);
+ }
+ if (response.code() != HttpStatus.SC_OK) {
+ throw new IllegalArgumentException(
+ "Received invalid http response: " + response.code() + ", text = "
+ + response.toString());
}
- Map<?, ?> response = JsonSerialization.mapReader().readValue(
- responseBody.body().string());
+ Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(response.body().string());
- String newExpiresIn = response.get(EXPIRES_IN).toString();
+ String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
accessTokenTimer.setExpiresIn(newExpiresIn);
- accessToken = response.get(ACCESS_TOKEN).toString();
+ accessToken = responseBody.get(ACCESS_TOKEN).toString();
} catch (Exception e) {
throw new IOException("Exception while refreshing access token", e);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index bfd7055990e..25ceb884609 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@ import org.apache.hadoop.util.JsonSerialization;
import org.apache.hadoop.util.Timer;
import org.apache.http.HttpStatus;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -96,38 +97,38 @@ public abstract class CredentialBasedAccessTokenProvider
}
void refresh() throws IOException {
- try {
- OkHttpClient client = new OkHttpClient();
- client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
- client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
-
- String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
- GRANT_TYPE, CLIENT_CREDENTIALS,
- CLIENT_ID, clientId);
-
- RequestBody body = RequestBody.create(URLENCODED, bodyString);
-
- Request request = new Request.Builder()
- .url(refreshURL)
- .post(body)
- .build();
- Response responseBody = client.newCall(request).execute();
-
- if (responseBody.code() != HttpStatus.SC_OK) {
+ OkHttpClient client = new OkHttpClient.Builder()
+ .connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .build();
+
+ String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
+ GRANT_TYPE, CLIENT_CREDENTIALS,
+ CLIENT_ID, clientId);
+
+ RequestBody body = RequestBody.create(bodyString, URLENCODED);
+
+ Request request = new Request.Builder()
+ .url(refreshURL)
+ .post(body)
+ .build();
+ try (Response response = client.newCall(request).execute()) {
+ if (!response.isSuccessful()) {
+ throw new IOException("Unexpected code " + response);
+ }
+
+ if (response.code() != HttpStatus.SC_OK) {
throw new IllegalArgumentException("Received invalid http response: "
- + responseBody.code() + ", text = " + responseBody.toString());
+ + response.code() + ", text = " + response.toString());
}
- Map<?, ?> response = JsonSerialization.mapReader().readValue(
- responseBody.body().string());
+ Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
+ response.body().string());
- String newExpiresIn = response.get(EXPIRES_IN).toString();
+ String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
timer.setExpiresIn(newExpiresIn);
- accessToken = response.get(ACCESS_TOKEN).toString();
-
+ accessToken = responseBody.get(ACCESS_TOKEN).toString();
} catch (Exception e) {
throw new IOException("Unable to obtain access token from credential", e);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
index 3f091c2945b..2f28b65e40e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.MediaType;
+import okhttp3.MediaType;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index d165a8f8302..44d1e8a388e 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -135,7 +135,9 @@
<ehcache.version>3.3.1</ehcache.version>
<hikari.version>2.4.12</hikari.version>
<mssql.version>6.2.1.jre7</mssql.version>
- <okhttp.version>2.7.5</okhttp.version>
+ <okhttp3.version>4.9.3</okhttp3.version>
+ <kotlin-stdlib.version>1.4.10</kotlin-stdlib.version>
+ <kotlin-stdlib-common.version>1.4.10</kotlin-stdlib-common.version>
<jdom.version>1.1</jdom.version>
<jna.version>5.2.0</jna.version>
<grizzly.version>2.2.21</grizzly.version>
@@ -221,14 +223,40 @@
<dependencyManagement>
<dependencies>
<dependency>
- <groupId>com.squareup.okhttp</groupId>
+ <groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
- <version>${okhttp.version}</version>
+ <version>${okhttp3.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib-common</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib</artifactId>
+ <version>${kotlin-stdlib.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.jetbrains</groupId>
+ <artifactId>annotations</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.jetbrains.kotlin</groupId>
+ <artifactId>kotlin-stdlib-common</artifactId>
+ <version>${kotlin-stdlib-common.version}</version>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>mockwebserver</artifactId>
- <version>3.7.0</version>
+ <version>4.9.3</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/hadoop-tools/hadoop-azure-datalake/pom.xml b/hadoop-tools/hadoop-azure-datalake/pom.xml
index aeaf9cbfbc5..2b771a61ac6 100644
--- a/hadoop-tools/hadoop-azure-datalake/pom.xml
+++ b/hadoop-tools/hadoop-azure-datalake/pom.xml
@@ -29,7 +29,6 @@
</description>
<packaging>jar</packaging>
<properties>
- <okHttpVersion>${okhttp.version}</okHttpVersion>
<minimalJsonVersion>0.9.1</minimalJsonVersion>
<file.encoding>UTF-8</file.encoding>
<downloadSources>true</downloadSources>
@@ -118,12 +117,6 @@
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
- <dependency>
- <groupId>com.squareup.okhttp</groupId>
- <artifactId>okhttp</artifactId>
- <version>${okhttp.version}</version>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@@ -141,12 +134,6 @@
<scope>test</scope>
<type>test-jar</type>
</dependency>
- <dependency>
- <groupId>com.squareup.okhttp</groupId>
- <artifactId>mockwebserver</artifactId>
- <version>${okhttp.version}</version>
- <scope>test</scope>
- </dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org