You are viewing a plain text version of this content; the canonical link (a hyperlink in the original HTML version) is not available in this plain-text export.
Posted to common-commits@hadoop.apache.org by aa...@apache.org on 2022/05/20 17:53:33 UTC

[hadoop] branch trunk updated: HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)

This is an automated email from the ASF dual-hosted git repository.

aajisaka pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new fb910bd9068 HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)
fb910bd9068 is described below

commit fb910bd9068f12f89136c2776ca6a45caac0408f
Author: Ashutosh Gupta <as...@st.niituniversity.in>
AuthorDate: Fri May 20 18:53:14 2022 +0100

    HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)
    
    Co-authored-by: Ashutosh Gupta <as...@amazon.com>
    Signed-off-by: Akira Ajisaka <aa...@apache.org>
---
 LICENSE-binary                                     |  2 +-
 hadoop-client-modules/hadoop-client/pom.xml        | 12 ++++
 .../dev-support/findbugsExcludeFile.xml            | 13 ++++
 hadoop-hdfs-project/hadoop-hdfs-client/pom.xml     | 10 +++-
 .../ConfRefreshTokenBasedAccessTokenProvider.java  | 66 ++++++++++-----------
 .../oauth2/CredentialBasedAccessTokenProvider.java | 69 +++++++++++-----------
 .../hadoop/hdfs/web/oauth2/OAuth2Constants.java    |  2 +-
 hadoop-project/pom.xml                             | 35 ++++++++++-
 hadoop-tools/hadoop-azure-datalake/pom.xml         | 13 ----
 9 files changed, 135 insertions(+), 87 deletions(-)

diff --git a/LICENSE-binary b/LICENSE-binary
index fe60ac3609c..980b9c7f2b6 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -241,7 +241,7 @@ com.google.guava:guava:27.0-jre
 com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.microsoft.azure:azure-storage:7.0.0
 com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp:okhttp:2.7.5
+com.squareup.okhttp3:okhttp:4.9.3
 com.squareup.okio:okio:1.6.0
 com.zaxxer:HikariCP:4.0.3
 commons-beanutils:commons-beanutils:1.9.3
diff --git a/hadoop-client-modules/hadoop-client/pom.xml b/hadoop-client-modules/hadoop-client/pom.xml
index b48a221bdf1..5299c9e8713 100644
--- a/hadoop-client-modules/hadoop-client/pom.xml
+++ b/hadoop-client-modules/hadoop-client/pom.xml
@@ -114,6 +114,18 @@
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-server</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.jetbrains.kotlin</groupId>
+          <artifactId>kotlin-stdlib</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.jetbrains.kotlin</groupId>
+          <artifactId>kotlin-stdlib-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.squareup.okhttp3</groupId>
+          <artifactId>okhttp</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-core</artifactId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
index c96b3a99bd1..508388aa481 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
@@ -94,4 +94,17 @@
     <Bug pattern="EI_EXPOSE_REP" />
   </Match>
 
+  <!--okhttp classes from Kotlin are not analysed for NP check. -->
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.web.oauth2.ConfRefreshTokenBasedAccessTokenProvider" />
+    <Method name="refresh" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+  </Match>
+
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider" />
+    <Method name="refresh" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+  </Match>
+
 </FindBugsFilter>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
index c4e65ef811d..9bb0932d328 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
@@ -35,9 +35,17 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
 
   <dependencies>
     <dependency>
-      <groupId>com.squareup.okhttp</groupId>
+      <groupId>com.squareup.okhttp3</groupId>
       <artifactId>okhttp</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.jetbrains.kotlin</groupId>
+      <artifactId>kotlin-stdlib</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.jetbrains.kotlin</groupId>
+      <artifactId>kotlin-stdlib-common</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
index 3e3fbfbd913..e944e8c1c8d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@ import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
 import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -102,37 +103,34 @@ public class ConfRefreshTokenBasedAccessTokenProvider
   }
 
   void refresh() throws IOException {
-    try {
-      OkHttpClient client = new OkHttpClient();
-      client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-      client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-                TimeUnit.MILLISECONDS);
-
-      String bodyString = Utils.postBody(GRANT_TYPE, REFRESH_TOKEN,
-          REFRESH_TOKEN, refreshToken,
-          CLIENT_ID, clientId);
-
-      RequestBody body = RequestBody.create(URLENCODED, bodyString);
-
-      Request request = new Request.Builder()
-          .url(refreshURL)
-          .post(body)
-          .build();
-      Response responseBody = client.newCall(request).execute();
-
-      if (responseBody.code() != HttpStatus.SC_OK) {
-        throw new IllegalArgumentException("Received invalid http response: "
-            + responseBody.code() + ", text = " + responseBody.toString());
+    OkHttpClient client =
+        new OkHttpClient.Builder().connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+                TimeUnit.MILLISECONDS)
+            .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+            .build();
+
+    String bodyString =
+        Utils.postBody(GRANT_TYPE, REFRESH_TOKEN, REFRESH_TOKEN, refreshToken, CLIENT_ID, clientId);
+
+    RequestBody body = RequestBody.create(bodyString, URLENCODED);
+
+    Request request = new Request.Builder().url(refreshURL).post(body).build();
+    try (Response response = client.newCall(request).execute()) {
+      if (!response.isSuccessful()) {
+        throw new IOException("Unexpected code " + response);
+      }
+      if (response.code() != HttpStatus.SC_OK) {
+        throw new IllegalArgumentException(
+            "Received invalid http response: " + response.code() + ", text = "
+                + response.toString());
       }
 
-      Map<?, ?> response = JsonSerialization.mapReader().readValue(
-          responseBody.body().string());
+      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(response.body().string());
 
-      String newExpiresIn = response.get(EXPIRES_IN).toString();
+      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);
 
-      accessToken = response.get(ACCESS_TOKEN).toString();
+      accessToken = responseBody.get(ACCESS_TOKEN).toString();
     } catch (Exception e) {
       throw new IOException("Exception while refreshing access token", e);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index bfd7055990e..25ceb884609 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@ import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
 import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -96,38 +97,38 @@ public abstract class CredentialBasedAccessTokenProvider
   }
 
   void refresh() throws IOException {
-    try {
-      OkHttpClient client = new OkHttpClient();
-      client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-      client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-
-      String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
-          GRANT_TYPE, CLIENT_CREDENTIALS,
-          CLIENT_ID, clientId);
-
-      RequestBody body = RequestBody.create(URLENCODED, bodyString);
-
-      Request request = new Request.Builder()
-          .url(refreshURL)
-          .post(body)
-          .build();
-      Response responseBody = client.newCall(request).execute();
-
-      if (responseBody.code() != HttpStatus.SC_OK) {
+    OkHttpClient client = new OkHttpClient.Builder()
+            .connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+            .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+            .build();
+
+    String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
+        GRANT_TYPE, CLIENT_CREDENTIALS,
+        CLIENT_ID, clientId);
+
+    RequestBody body = RequestBody.create(bodyString, URLENCODED);
+
+    Request request = new Request.Builder()
+        .url(refreshURL)
+        .post(body)
+        .build();
+    try (Response response = client.newCall(request).execute()) {
+      if (!response.isSuccessful()) {
+        throw new IOException("Unexpected code " + response);
+      }
+
+      if (response.code() != HttpStatus.SC_OK) {
         throw new IllegalArgumentException("Received invalid http response: "
-            + responseBody.code() + ", text = " + responseBody.toString());
+            + response.code() + ", text = " + response.toString());
       }
 
-      Map<?, ?> response = JsonSerialization.mapReader().readValue(
-          responseBody.body().string());
+      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
+          response.body().string());
 
-      String newExpiresIn = response.get(EXPIRES_IN).toString();
+      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);
 
-      accessToken = response.get(ACCESS_TOKEN).toString();
-
+      accessToken = responseBody.get(ACCESS_TOKEN).toString();
     } catch (Exception e) {
       throw new IOException("Unable to obtain access token from credential", e);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
index 3f091c2945b..2f28b65e40e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.MediaType;
+import okhttp3.MediaType;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 85df7ca7df4..6831609661a 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -135,6 +135,9 @@
     <hikari.version>4.0.3</hikari.version>
     <mssql.version>6.2.1.jre7</mssql.version>
     <okhttp.version>2.7.5</okhttp.version>
+    <okhttp3.version>4.9.3</okhttp3.version>
+    <kotlin-stdlib.verion>1.4.10</kotlin-stdlib.verion>
+    <kotlin-stdlib-common.version>1.4.10</kotlin-stdlib-common.version>
     <jdom2.version>2.0.6.1</jdom2.version>
     <jna.version>5.2.0</jna.version>
     <grizzly.version>2.2.21</grizzly.version>
@@ -222,14 +225,40 @@
   <dependencyManagement>
     <dependencies>
       <dependency>
-        <groupId>com.squareup.okhttp</groupId>
+        <groupId>com.squareup.okhttp3</groupId>
         <artifactId>okhttp</artifactId>
-        <version>${okhttp.version}</version>
+        <version>${okhttp3.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-stdlib</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-stdlib-common</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.jetbrains.kotlin</groupId>
+        <artifactId>kotlin-stdlib</artifactId>
+        <version>${kotlin-stdlib.verion}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.jetbrains.kotlin</groupId>
+        <artifactId>kotlin-stdlib-common</artifactId>
+        <version>${kotlin-stdlib-common.version}</version>
       </dependency>
       <dependency>
         <groupId>com.squareup.okhttp3</groupId>
         <artifactId>mockwebserver</artifactId>
-        <version>3.7.0</version>
+        <version>4.9.3</version>
         <scope>test</scope>
       </dependency>
       <dependency>
diff --git a/hadoop-tools/hadoop-azure-datalake/pom.xml b/hadoop-tools/hadoop-azure-datalake/pom.xml
index 446e2957b60..cec050d2c1b 100644
--- a/hadoop-tools/hadoop-azure-datalake/pom.xml
+++ b/hadoop-tools/hadoop-azure-datalake/pom.xml
@@ -29,7 +29,6 @@
   </description>
   <packaging>jar</packaging>
   <properties>
-    <okHttpVersion>${okhttp.version}</okHttpVersion>
     <minimalJsonVersion>0.9.1</minimalJsonVersion>
     <file.encoding>UTF-8</file.encoding>
     <downloadSources>true</downloadSources>
@@ -118,12 +117,6 @@
       <artifactId>hadoop-common</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>com.squareup.okhttp</groupId>
-      <artifactId>okhttp</artifactId>
-      <version>${okhttp.version}</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
@@ -141,12 +134,6 @@
       <scope>test</scope>
       <type>test-jar</type>
     </dependency>
-    <dependency>
-      <groupId>com.squareup.okhttp</groupId>
-      <artifactId>mockwebserver</artifactId>
-      <version>${okhttp.version}</version>
-      <scope>test</scope>
-    </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org