Posted to common-commits@hadoop.apache.org by we...@apache.org on 2021/01/04 17:44:29 UTC

[hadoop] branch trunk updated: HADOOP-17371. Bump Jetty to the latest version 9.4.34. Contributed by Wei-Chiu Chuang. (#2453)

This is an automated email from the ASF dual-hosted git repository.

weichiu pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 66ee0a6  HADOOP-17371. Bump Jetty to the latest version 9.4.34. Contributed by Wei-Chiu Chuang. (#2453)
66ee0a6 is described below

commit 66ee0a6df0dc0dd8242018153fd652a3206e73b5
Author: Wei-Chiu Chuang <we...@apache.org>
AuthorDate: Mon Jan 4 09:43:58 2021 -0800

    HADOOP-17371. Bump Jetty to the latest version 9.4.34. Contributed by Wei-Chiu Chuang. (#2453)
---
 .../hadoop-client-minicluster/pom.xml              | 12 +++++++++
 hadoop-common-project/hadoop-auth/pom.xml          |  4 +++
 .../server/AuthenticationFilter.java               | 14 ++++++++--
 .../security/http/RestCsrfPreventionFilter.java    |  5 ++++
 .../key/kms/server/KMSAuthenticationFilter.java    | 13 ++++++++++
 .../hadoop/hdfs/server/namenode/ImageServlet.java  | 30 ++++++++++++++--------
 hadoop-project/pom.xml                             |  2 +-
 7 files changed, 67 insertions(+), 13 deletions(-)

diff --git a/hadoop-client-modules/hadoop-client-minicluster/pom.xml b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
index 70a627c..0bedf1d 100644
--- a/hadoop-client-modules/hadoop-client-minicluster/pom.xml
+++ b/hadoop-client-modules/hadoop-client-minicluster/pom.xml
@@ -840,6 +840,18 @@
                         <exclude>*/**</exclude>
                       </excludes>
                     </filter>
+                    <filter>
+                      <artifact>org.eclipse.jetty:jetty-util-ajax</artifact>
+                      <excludes>
+                        <exclude>*/**</exclude>
+                      </excludes>
+                    </filter>
+                    <filter>
+                      <artifact>org.eclipse.jetty:jetty-server</artifact>
+                      <excludes>
+                        <exclude>jetty-dir.css</exclude>
+                      </excludes>
+                    </filter>
                   </filters>
 
                   <!-- relocate classes from mssql-jdbc -->
diff --git a/hadoop-common-project/hadoop-auth/pom.xml b/hadoop-common-project/hadoop-auth/pom.xml
index 4761945..10e0b9c 100644
--- a/hadoop-common-project/hadoop-auth/pom.xml
+++ b/hadoop-common-project/hadoop-auth/pom.xml
@@ -193,6 +193,10 @@
       <artifactId>guava</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+    </dependency>
   </dependencies>
 
   <build>
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 94d11f48..9f40c42 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -19,6 +19,7 @@ import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
 import org.apache.hadoop.security.authentication.util.*;
+import org.eclipse.jetty.server.Response;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -619,11 +620,20 @@ public class AuthenticationFilter implements Filter {
                 KerberosAuthenticator.WWW_AUTHENTICATE))) {
           errCode = HttpServletResponse.SC_FORBIDDEN;
         }
+        // After Jetty 9.4.21, sendError() no longer allows a custom message.
+        // use setStatusWithReason() to set a custom message.
+        String reason;
         if (authenticationEx == null) {
-          httpResponse.sendError(errCode, "Authentication required");
+          reason = "Authentication required";
         } else {
-          httpResponse.sendError(errCode, authenticationEx.getMessage());
+          reason = authenticationEx.getMessage();
         }
+
+        if (httpResponse instanceof Response) {
+          ((Response)httpResponse).setStatusWithReason(errCode, reason);
+        }
+
+        httpResponse.sendError(errCode, reason);
       }
     }
   }
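
The hunk above is the heart of this change: since Jetty 9.4.21, HttpServletResponse#sendError() no longer propagates the caller-supplied message, so the patch downcasts to Jetty's org.eclipse.jetty.server.Response where possible and records the reason via setStatusWithReason() before calling sendError(). A minimal, self-contained sketch of that pattern follows; the helper class and method names are illustrative only, not part of the commit:

    import java.io.IOException;

    import javax.servlet.http.HttpServletResponse;

    import org.eclipse.jetty.server.Response;

    /**
     * Illustrative helper (not part of this commit): since Jetty 9.4.21,
     * sendError() discards the custom message, so the reason phrase is set
     * explicitly whenever the concrete response is Jetty's Response.
     */
    public final class JettySendErrorSketch {

      private JettySendErrorSketch() {
      }

      public static void sendError(HttpServletResponse response, int code,
          String message) throws IOException {
        // On Jetty, record the custom reason before sendError() renders the
        // standard error page; on other containers this step is simply skipped.
        if (response instanceof Response) {
          ((Response) response).setStatusWithReason(code, message);
        }
        response.sendError(code, message);
      }
    }

The ImageServlet change further down in this diff adds essentially the same logic as a private sendError() helper.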
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
index 59cb0d6..b81ed8e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/http/RestCsrfPreventionFilter.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 
+import org.eclipse.jetty.server.Response;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -271,6 +272,10 @@ public class RestCsrfPreventionFilter implements Filter {
 
     @Override
     public void sendError(int code, String message) throws IOException {
+      if (httpResponse instanceof Response) {
+        ((Response)httpResponse).setStatusWithReason(code, message);
+      }
+
       httpResponse.sendError(code, message);
     }
   }
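
Both this filter and the KMSAuthenticationFilter in the next file can only act on the response object handed to them, which is Jetty's concrete Response only when no other wrapper sits in between; the KMS filter below checks the response it wraps and logs a warning when it is not a Jetty Response. If several wrappers were ever stacked, the underlying Jetty Response could still be located by walking the wrapper chain; a hypothetical utility (not part of the commit) illustrating that idea:

    import javax.servlet.ServletResponse;
    import javax.servlet.ServletResponseWrapper;

    import org.eclipse.jetty.server.Response;

    /**
     * Hypothetical utility (not part of this commit): walks a chain of
     * ServletResponseWrapper instances to find the underlying Jetty Response,
     * or returns null when the container is not Jetty.
     */
    public final class JettyResponseFinder {

      private JettyResponseFinder() {
      }

      public static Response unwrap(ServletResponse response) {
        ServletResponse current = response;
        while (current instanceof ServletResponseWrapper) {
          current = ((ServletResponseWrapper) current).getResponse();
        }
        return (current instanceof Response) ? (Response) current : null;
      }
    }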
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
index c020af2..ead22e4 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSAuthenticationFilter.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthentica
 import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.hadoop.security.token.delegation.web.KerberosDelegationTokenAuthenticationHandler;
 import org.apache.hadoop.security.token.delegation.web.PseudoDelegationTokenAuthenticationHandler;
+import org.eclipse.jetty.server.Response;
 
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -113,6 +114,18 @@ public class KMSAuthenticationFilter
     public void sendError(int sc, String msg) throws IOException {
       statusCode = sc;
       this.msg = msg;
+
+      ServletResponse response = getResponse();
+
+      // After Jetty 9.4.21, sendError() no longer allows a custom message.
+      // use setStatusWithReason() to set a custom message.
+      if (response instanceof Response) {
+        ((Response) response).setStatusWithReason(sc, msg);
+      } else {
+        KMS.LOG.warn("The wrapped response object is instance of {}" +
+            ", not org.eclipse.jetty.server.Response. Can't set custom error " +
+            "message", response.getClass());
+      }
       super.sendError(sc, HtmlQuoting.quoteHtmlChars(msg));
     }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageServlet.java
index 54c8738..54825d8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageServlet.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.ha.HAServiceProtocol;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.security.SecurityUtil;
+import org.eclipse.jetty.server.Response;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -119,7 +120,7 @@ public class ImageServlet extends HttpServlet {
     if (nnImage == null) {
       String errorMsg = "NameNode initialization not yet complete. "
           + "FSImage has not been set in the NameNode.";
-      response.sendError(HttpServletResponse.SC_FORBIDDEN, errorMsg);
+      sendError(response, HttpServletResponse.SC_FORBIDDEN, errorMsg);
       throw new IOException(errorMsg);
     }
     return nnImage;
@@ -218,7 +219,7 @@ public class ImageServlet extends HttpServlet {
       
     } catch (Throwable t) {
       String errMsg = "GetImage failed. " + StringUtils.stringifyException(t);
-      response.sendError(HttpServletResponse.SC_GONE, errMsg);
+      sendError(response, HttpServletResponse.SC_GONE, errMsg);
       throw new IOException(errMsg);
     } finally {
       response.getOutputStream().close();
@@ -234,7 +235,7 @@ public class ImageServlet extends HttpServlet {
             conf)) {
       String errorMsg = "Only Namenode, Secondary Namenode, and administrators may access "
           + "this servlet";
-      response.sendError(HttpServletResponse.SC_FORBIDDEN, errorMsg);
+      sendError(response, HttpServletResponse.SC_FORBIDDEN, errorMsg);
       LOG.warn("Received non-NN/SNN/administrator request for image or edits from "
           + request.getUserPrincipal().getName()
           + " at "
@@ -247,7 +248,7 @@ public class ImageServlet extends HttpServlet {
         && !myStorageInfoString.equals(theirStorageInfoString)) {
       String errorMsg = "This namenode has storage info " + myStorageInfoString
           + " but the secondary expected " + theirStorageInfoString;
-      response.sendError(HttpServletResponse.SC_FORBIDDEN, errorMsg);
+      sendError(response, HttpServletResponse.SC_FORBIDDEN, errorMsg);
       LOG.warn("Received an invalid request file transfer request "
           + "from a secondary with storage info " + theirStorageInfoString);
       throw new IOException(errorMsg);
@@ -578,7 +579,7 @@ public class ImageServlet extends HttpServlet {
                 // we need a different response type here so the client can differentiate this
                 // from the failure to upload due to (1) security, or (2) other checkpoints already
                 // present
-                response.sendError(HttpServletResponse.SC_EXPECTATION_FAILED,
+                sendError(response, HttpServletResponse.SC_EXPECTATION_FAILED,
                     "Nameode "+request.getLocalAddr()+" is currently not in a state which can "
                         + "accept uploads of new fsimages. State: "+state);
                 return null;
@@ -593,7 +594,7 @@ public class ImageServlet extends HttpServlet {
               // if the node is attempting to upload an older transaction, we ignore it
               SortedSet<ImageUploadRequest> larger = currentlyDownloadingCheckpoints.tailSet(imageRequest);
               if (larger.size() > 0) {
-                response.sendError(HttpServletResponse.SC_CONFLICT,
+                sendError(response, HttpServletResponse.SC_CONFLICT,
                     "Another checkpointer is already in the process of uploading a" +
                         " checkpoint made up to transaction ID " + larger.last());
                 return null;
@@ -601,7 +602,7 @@ public class ImageServlet extends HttpServlet {
 
               //make sure no one else has started uploading one
               if (!currentlyDownloadingCheckpoints.add(imageRequest)) {
-                response.sendError(HttpServletResponse.SC_CONFLICT,
+                sendError(response, HttpServletResponse.SC_CONFLICT,
                     "Either current namenode is checkpointing or another"
                         + " checkpointer is already in the process of "
                         + "uploading a checkpoint made at transaction ID "
@@ -648,7 +649,7 @@ public class ImageServlet extends HttpServlet {
                     (txid - lastCheckpointTxid) + " expecting at least "
                     + checkpointTxnCount;
                 LOG.info(message);
-                response.sendError(HttpServletResponse.SC_CONFLICT, message);
+                sendError(response, HttpServletResponse.SC_CONFLICT, message);
                 return null;
               }
 
@@ -658,7 +659,7 @@ public class ImageServlet extends HttpServlet {
                       + "another checkpointer already uploaded an "
                       + "checkpoint for txid " + txid;
                   LOG.info(message);
-                  response.sendError(HttpServletResponse.SC_CONFLICT, message);
+                  sendError(response, HttpServletResponse.SC_CONFLICT, message);
                   return null;
                 }
 
@@ -695,11 +696,20 @@ public class ImageServlet extends HttpServlet {
           });
     } catch (Throwable t) {
       String errMsg = "PutImage failed. " + StringUtils.stringifyException(t);
-      response.sendError(HttpServletResponse.SC_GONE, errMsg);
+      sendError(response, HttpServletResponse.SC_GONE, errMsg);
       throw new IOException(errMsg);
     }
   }
 
+  private void sendError(HttpServletResponse response, int code, String message)
+      throws IOException {
+    if (response instanceof Response) {
+      ((Response)response).setStatusWithReason(code, message);
+    }
+
+    response.sendError(code, message);
+  }
+
   /*
    * Params required to handle put image request
    */
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 831275e..f3fa475 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -37,7 +37,7 @@
     <!--Whether to proceed to next module if any test failures exist-->
     <ignoreTestFailure>true</ignoreTestFailure>
     <maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
-    <jetty.version>9.4.20.v20190813</jetty.version>
+    <jetty.version>9.4.35.v20201120</jetty.version>
     <test.exclude>_</test.exclude>
     <test.exclude.pattern>_</test.exclude.pattern>
 


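As an aside, not part of the commit: a quick way to confirm which Jetty build actually ends up on the runtime classpath after bumping jetty.version is to print the version constant that jetty-util exposes. A minimal sketch, assuming jetty-util is on the classpath:

    import org.eclipse.jetty.util.Jetty;

    /** Hypothetical sanity check: prints the Jetty version found on the classpath. */
    public class PrintJettyVersion {

      public static void main(String[] args) {
        // With this change in place, this should report 9.4.35.v20201120.
        System.out.println("Jetty on classpath: " + Jetty.VERSION);
      }
    }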