Posted to common-commits@hadoop.apache.org by iw...@apache.org on 2019/08/14 22:46:47 UTC

[hadoop] branch branch-2 updated: HDFS-14423. Percent (%) and plus (+) characters no longer work in WebHDFS.

This is an automated email from the ASF dual-hosted git repository.

iwasakims pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-2 by this push:
     new 670de35  HDFS-14423. Percent (%) and plus (+) characters no longer work in WebHDFS.
670de35 is described below

commit 670de354b305ad459dab51ce4aa8069843a837b2
Author: Masatake Iwasaki <iw...@apache.org>
AuthorDate: Mon Aug 12 12:07:16 2019 +0900

    HDFS-14423. Percent (%) and plus (+) characters no longer work in WebHDFS.
    
    Signed-off-by: Masatake Iwasaki <iw...@apache.org>
    (cherry picked from commit da0006fe0473e353ee2d489156248a01aa982dfd)
    
     Conflicts:
    	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    
    (cherry picked from commit d7ca016d63d89e5c8377a035f93485a7c77c3430)
    
     Conflicts:
    	hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    	hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
---
 .../java/org/apache/hadoop/http/HttpServer2.java   | 15 +++++++++
 .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java  | 35 +-------------------
 .../datanode/web/webhdfs/WebHdfsHandler.java       |  3 +-
 .../hdfs/server/namenode/NameNodeHttpServer.java   |  8 +++--
 .../web/resources/NamenodeWebHdfsMethods.java      |  7 ++--
 .../org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java | 37 ++++++++++++++++++----
 6 files changed, 56 insertions(+), 49 deletions(-)
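
For context: an earlier change (HDFS-13176), which special-cased semicolons
in WebHDFS paths, added a client-side pass in WebHdfsFileSystem that tried
to re-encode "special" filename characters, and that heuristic broke names
containing a literal percent or plus. This patch deletes the heuristic and
instead configures Jersey on the NameNode to match path segments including
their matrix parameters, so raw paths round-trip unchanged. A minimal
sketch of the previously failing client calls (the cluster URI and
configuration are illustrative, not part of the patch):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Before this fix, these names could be corrupted in transit: '+'
    // was turned into a space by redundant URL decoding on the server,
    // and a bare '%' made URLDecoder.decode throw.
    // (Fragment; the surrounding method must declare throws IOException.)
    FileSystem webhdfs = FileSystem.get(
        URI.create("webhdfs://namenode:50070"), new Configuration());
    webhdfs.create(new Path("/a+b")).close();
    webhdfs.create(new Path("/a%b")).close();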

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 74936ee..6ebbf71 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -698,12 +698,27 @@ public final class HttpServer2 implements FilterContainer {
    */
   public void addJerseyResourcePackage(final String packageName,
       final String pathSpec) {
+    addJerseyResourcePackage(packageName, pathSpec,
+        Collections.<String, String>emptyMap());
+  }
+
+  /**
+   * Add a Jersey resource package.
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   * @param params properties and features for ResourceConfig
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec, Map<String, String> params) {
     LOG.info("addJerseyResourcePackage: packageName=" + packageName
         + ", pathSpec=" + pathSpec);
     final ServletHolder sh = new ServletHolder(ServletContainer.class);
     sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
         "com.sun.jersey.api.core.PackagesResourceConfig");
     sh.setInitParameter("com.sun.jersey.config.property.packages", packageName);
+    for (Map.Entry<String, String> entry : params.entrySet()) {
+      sh.setInitParameter(entry.getKey(), entry.getValue());
+    }
     webAppContext.addServlet(sh, pathSpec);
   }
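
The new overload lets callers pass arbitrary Jersey init parameters through
to the servlet. A minimal usage sketch, mirroring the call that
NameNodeHttpServer makes further down in this patch (the package name and
path spec here are illustrative):

    import java.util.HashMap;
    import java.util.Map;
    import com.sun.jersey.api.core.ResourceConfig;

    Map<String, String> params = new HashMap<>();
    params.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
    httpServer2.addJerseyResourcePackage(
        "org.apache.hadoop.hdfs.server.namenode.web.resources",
        "/webhdfs/v1/*", params);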
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 7d30967..67db5b3b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -37,8 +37,6 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
-import java.net.URLDecoder;
-import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -143,8 +141,6 @@ public class WebHdfsFileSystem extends FileSystem
   public static final String EZ_HEADER = "X-Hadoop-Accept-EZ";
   public static final String FEFINFO_HEADER = "X-Hadoop-feInfo";
 
-  public static final String SPECIAL_FILENAME_CHARACTERS_REGEX = ".*[;+%].*";
-
   /**
    * Default connection factory may be overridden in tests to use smaller
    * timeout values
@@ -609,38 +605,9 @@ public class WebHdfsFileSystem extends FileSystem
   URL toUrl(final HttpOpParam.Op op, final Path fspath,
       final Param<?,?>... parameters) throws IOException {
     //initialize URI path and query
-    Path encodedFSPath = fspath;
-    if (fspath != null) {
-      URI fspathUri = fspath.toUri();
-      String fspathUriDecoded = fspathUri.getPath();
-      boolean pathAlreadyEncoded = false;
-      try {
-        fspathUriDecoded = URLDecoder.decode(fspathUri.getPath(), "UTF-8");
-        pathAlreadyEncoded = true;
-      } catch (IllegalArgumentException ex) {
-        LOG.trace("Cannot decode URL encoded file", ex);
-      }
-      String[] fspathItems = fspathUriDecoded.split("/");
-
-      if (fspathItems.length > 0) {
-        StringBuilder fsPathEncodedItems = new StringBuilder();
-        for (String fsPathItem : fspathItems) {
-          fsPathEncodedItems.append("/");
-          if (fsPathItem.matches(SPECIAL_FILENAME_CHARACTERS_REGEX) ||
-              pathAlreadyEncoded) {
-            fsPathEncodedItems.append(URLEncoder.encode(fsPathItem, "UTF-8"));
-          } else {
-            fsPathEncodedItems.append(fsPathItem);
-          }
-        }
-        encodedFSPath = new Path(fspathUri.getScheme(),
-                fspathUri.getAuthority(), fsPathEncodedItems.substring(1));
-      }
-    }
 
     final String path = PATH_PREFIX
-        + (encodedFSPath == null ? "/" :
-            makeQualified(encodedFSPath).toUri().getRawPath());
+        + (fspath == null? "/": makeQualified(fspath).toUri().getRawPath());
     final String query = op.toQueryString()
         + Param.toSortedString("&", getAuthParameters(op))
         + Param.toSortedString("&", parameters);
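
The block deleted above tried to guess whether an incoming Path was already
percent-encoded and selectively re-encoded any component matching
".*[;+%].*". That guesswork is inherently lossy for names containing a
literal '+' or '%', as this standalone illustration of URLDecoder semantics
shows:

    import java.net.URLDecoder;

    // '+' is form-encoding shorthand for a space, so "decoding" an
    // unencoded name silently corrupts it:
    URLDecoder.decode("a+b", "UTF-8");  // returns "a b"
    // A bare '%' is an incomplete escape sequence and is rejected:
    URLDecoder.decode("a%b", "UTF-8");  // throws IllegalArgumentException
    // (Both calls also declare the checked UnsupportedEncodingException.)

With the heuristic gone, the client simply sends the raw, already
percent-encoded path from makeQualified(fspath).toUri().getRawPath().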
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
index 2eb03a3..f20e565 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java
@@ -57,7 +57,6 @@ import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;
@@ -127,7 +126,7 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
     params = new ParameterParser(queryString, conf);
     DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
     ugi = ugiProvider.ugi();
-    path = URLDecoder.decode(params.path(), "UTF-8");
+    path = params.path();
 
     injectToken();
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
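
On the DataNode side, params.path() already returns a decoded path, so the
URLDecoder.decode() removed here was a second, lossy decode: a file
legitimately named "a+b" came back as "a b". Dropping the call leaves a
single decode in the request path.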
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 165e8f9..4f73793 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -52,6 +52,8 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
+import com.sun.jersey.api.core.ResourceConfig;
+
 /**
  * Encapsulates the HTTP server started by the NameNode. 
  */
@@ -114,9 +116,11 @@ public class NameNodeHttpServer {
       }
 
       // add webhdfs packages
+      final Map<String, String> resourceParams = new HashMap<>();
+      resourceParams.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
       httpServer2.addJerseyResourcePackage(
-          jerseyResourcePackage+ ";" + Param.class.getPackage().getName(),
-          pathSpec);
+          jerseyResourcePackage + ";" + Param.class.getPackage().getName(),
+          pathSpec, resourceParams);
     }
   }
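
Setting ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS makes Jersey 1.x include
matrix parameters when matching path segments against URI templates, so a
request for a file literally named "a;b" is no longer truncated at the
semicolon before it reaches NamenodeWebHdfsMethods.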
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 1ddcd3e..836af9d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -25,7 +25,6 @@ import java.io.PrintWriter;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.net.UnknownHostException;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
@@ -960,10 +959,8 @@ public class NamenodeWebHdfsMethods {
     return doAs(ugi, new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-          String absolutePath = path.getAbsolutePath() == null ? null :
-              URLDecoder.decode(path.getAbsolutePath(), "UTF-8");
-          return get(ugi, delegation, username, doAsUser, absolutePath,
-              op, offset, length, renewer, bufferSize,
+          return get(ugi, delegation, username, doAsUser,
+              path.getAbsolutePath(), op, offset, length, renewer, bufferSize,
               xattrNames, xattrEncoding, excludeDatanodes, fsAction, tokenKind,
               tokenService, noredirect, startAfter);
       }
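
As in WebHdfsHandler, the path handed to this method has already been
decoded (JAX-RS decodes @PathParam values by default), so the
URLDecoder.decode() removed above was a redundant second decode of the same
string, with the same '+'-to-space corruption.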
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
index 02a68ea..c1c499b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.WebHdfs;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
@@ -76,7 +77,7 @@ public class TestWebHdfsUrl {
         uri, conf);
 
     // Construct a file path that contains percentage-encoded string
-    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
+    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668+";
     Path fsPath = new Path(pathName);
     URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
     // We should get back the original file path after cycling back and decoding
@@ -415,15 +416,11 @@ public class TestWebHdfsUrl {
   }
 
   private static final String BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME =
-          "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.";
+          "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.+%";
 
   @Test
   public void testWebHdfsBackwardCompatibleSpecialCharacterFile()
           throws Exception {
-
-    assertFalse(BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME
-            .matches(WebHdfsFileSystem.SPECIAL_FILENAME_CHARACTERS_REGEX));
-
     UserGroupInformation ugi =
             UserGroupInformation.createRemoteUser("test-user");
     ugi.setAuthenticationMethod(KERBEROS);
@@ -469,4 +466,32 @@ public class TestWebHdfsUrl {
     }
   }
 
+  @Test
+  public void testWebHdfsPathWithSemicolon() throws Exception {
+    try (MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(WebHdfsTestUtil.createConf())
+            .numDataNodes(1)
+            .build()) {
+      cluster.waitActive();
+
+      // regression test for HDFS-14423.
+      final Path semicolon = new Path("/a;b");
+      final Path plus = new Path("/a+b");
+      final Path percent = new Path("/a%b");
+
+      final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(
+          cluster.getConfiguration(0), WebHdfs.SCHEME);
+      webhdfs.create(semicolon).close();
+      webhdfs.create(plus).close();
+      webhdfs.create(percent).close();
+
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      assertEquals(semicolon.getName(),
+          dfs.getFileStatus(semicolon).getPath().getName());
+      assertEquals(plus.getName(),
+          dfs.getFileStatus(plus).getPath().getName());
+      assertEquals(percent.getName(),
+          dfs.getFileStatus(percent).getPath().getName());
+    }
+  }
 }
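
A hedged companion check, not part of the patch: after this fix the same
names should also round-trip when read back through WebHDFS itself, not
only through DistributedFileSystem:

    // Illustrative only; assumes the webhdfs instance from the test above.
    assertEquals("a;b",
        webhdfs.getFileStatus(new Path("/a;b")).getPath().getName());
    assertEquals("a+b",
        webhdfs.getFileStatus(new Path("/a+b")).getPath().getName());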

