Posted to hdfs-commits@hadoop.apache.org by sz...@apache.org on 2011/11/08 20:25:57 UTC

svn commit: r1199396 - in /hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./ src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/ src/main/java/org/apache/hadoop/hdfs/web/ src/main/java/org/apache/hadoop/hdfs/web/resources/

Author: szetszwo
Date: Tue Nov  8 19:25:57 2011
New Revision: 1199396

URL: http://svn.apache.org/viewvc?rev=1199396&view=rev
Log:
HDFS-2540. Webhdfs: change "Expect: 100-continue" to two-step write; change "HdfsFileStatus" and "localName" respectively to "FileStatus" and "pathSuffix" in JSON response.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1199396&r1=1199395&r2=1199396&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Nov  8 19:25:57 2011
@@ -116,18 +116,6 @@ Release 0.23.1 - UNRELEASED
 
   BUG FIXES
 
-    HDFS-2416. distcp with a webhdfs uri on a secure cluster fails. (jitendra)
-
-    HDFS-2527. WebHdfs: remove the use of "Range" header in Open; use ugi
-    username if renewer parameter is null in GetDelegationToken; response OK
-    when setting replication for non-files; rename GETFILEBLOCKLOCATIONS to
-    GET_BLOCK_LOCATIONS and state that it is a private unstable API; replace
-    isDirectory and isSymlink with enum {FILE, DIRECTORY, SYMLINK} in
-    HdfsFileStatus JSON object.  (szetszwo)
-
-    HDFS-2528. Webhdfs: set delegation kind to WEBHDFS and add a HDFS token
-    when http requests are redirected to datanode.  (szetszwo)
-
 Release 0.23.0 - 2011-11-01 
 
   INCOMPATIBLE CHANGES
@@ -1284,6 +1272,22 @@ Release 0.23.0 - 2011-11-01 
     HDFS-2065. Add null checks in DFSClient.getFileChecksum(..).  (Uma
     Maheswara Rao G via szetszwo)
 
+    HDFS-2416. distcp with a webhdfs uri on a secure cluster fails. (jitendra)
+
+    HDFS-2527. WebHdfs: remove the use of "Range" header in Open; use ugi
+    username if renewer parameter is null in GetDelegationToken; response OK
+    when setting replication for non-files; rename GETFILEBLOCKLOCATIONS to
+    GET_BLOCK_LOCATIONS and state that it is a private unstable API; replace
+    isDirectory and isSymlink with enum {FILE, DIRECTORY, SYMLINK} in
+    HdfsFileStatus JSON object.  (szetszwo)
+
+    HDFS-2528. Webhdfs: set delegation kind to WEBHDFS and add a HDFS token
+    when http requests are redirected to datanode.  (szetszwo)
+
+    HDFS-2540. Webhdfs: change "Expect: 100-continue" to two-step write; change 
+    "HdfsFileStatus" and "localName" respectively to "FileStatus" and
+    "pathSuffix" in JSON response.  (szetszwo)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1199396&r1=1199395&r2=1199396&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Tue Nov  8 19:25:57 2011
@@ -48,6 +48,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
@@ -577,8 +578,8 @@ public class NamenodeWebHdfsMethods {
       @Override
       public void write(final OutputStream outstream) throws IOException {
         final PrintStream out = new PrintStream(outstream);
-        out.println("{\"" + HdfsFileStatus.class.getSimpleName() + "es\":{\""
-            + HdfsFileStatus.class.getSimpleName() + "\":[");
+        out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
+            + FileStatus.class.getSimpleName() + "\":[");
 
         final HdfsFileStatus[] partial = first.getPartialListing();
         if (partial.length > 0) {
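
For context, the rename above only affects the JSON envelope of a directory
listing: the outer keys are now derived from the public FileStatus class name
rather than HdfsFileStatus. A response therefore starts roughly as follows
(a sketch; the entries and their values are invented for illustration):

    {"FileStatuses":{"FileStatus":[
      {"pathSuffix":"a.txt","type":"FILE", ...},
      {"pathSuffix":"subdir","type":"DIRECTORY", ...}
    ]}}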

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1199396&r1=1199395&r2=1199396&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java Tue Nov  8 19:25:57 2011
@@ -28,6 +28,7 @@ import java.util.TreeMap;
 
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -149,7 +150,7 @@ public class JsonUtil {
       return null;
     }
     final Map<String, Object> m = new TreeMap<String, Object>();
-    m.put("localName", status.getLocalName());
+    m.put("pathSuffix", status.getLocalName());
     m.put("type", PathType.valueOf(status));
     if (status.isSymlink()) {
       m.put("symlink", status.getSymlink());
@@ -163,8 +164,7 @@ public class JsonUtil {
     m.put("modificationTime", status.getModificationTime());
     m.put("blockSize", status.getBlockSize());
     m.put("replication", status.getReplication());
-    return includeType ? toJsonString(HdfsFileStatus.class, m) : 
-      JSON.toString(m);
+    return includeType ? toJsonString(FileStatus.class, m): JSON.toString(m);
   }
 
   /** Convert a Json map to a HdfsFileStatus object. */
@@ -174,8 +174,8 @@ public class JsonUtil {
     }
 
     final Map<?, ?> m = includesType ? 
-        (Map<?, ?>)json.get(HdfsFileStatus.class.getSimpleName()) : json;
-    final String localName = (String) m.get("localName");
+        (Map<?, ?>)json.get(FileStatus.class.getSimpleName()) : json;
+    final String localName = (String) m.get("pathSuffix");
     final PathType type = PathType.valueOf((String) m.get("type"));
     final byte[] symlink = type != PathType.SYMLINK? null
         : DFSUtil.string2Bytes((String)m.get("symlink"));
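
For context, toJsonString(FileStatus.class, m) wraps the map under the simple
class name, so a single file status now serializes roughly as below (a sketch
showing only keys visible in this patch; the values are invented):

    {"FileStatus":{
      "pathSuffix":"a.txt",
      "type":"FILE",
      "modificationTime":1320781557000,
      "blockSize":67108864,
      "replication":3
    }}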

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1199396&r1=1199395&r2=1199396&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Tue Nov  8 19:25:57 2011
@@ -334,14 +334,13 @@ public class WebHdfsFileSystem extends F
     final URL url = toUrl(op, fspath, parameters);
 
     //connect and get response
-    final HttpURLConnection conn = getHttpUrlConnection(url);
+    HttpURLConnection conn = getHttpUrlConnection(url);
     try {
       conn.setRequestMethod(op.getType().toString());
-      conn.setDoOutput(op.getDoOutput());
       if (op.getDoOutput()) {
-        conn.setRequestProperty("Expect", "100-Continue");
-        conn.setInstanceFollowRedirects(true);
+        conn = twoStepWrite(conn, op);
       }
+      conn.setDoOutput(op.getDoOutput());
       conn.connect();
       return conn;
     } catch (IOException e) {
@@ -349,6 +348,35 @@ public class WebHdfsFileSystem extends F
       throw e;
     }
   }
+  
+  /**
+   * Two-step Create/Append:
+   * Step 1) Submit an HTTP request with neither auto-redirect nor data.
+   * Step 2) Submit the data to the URL from the Location header.
+   * 
+   * The reason for the two-step create/append is to prevent clients from
+   * sending out data before the redirect. This issue is addressed by the
+   * "Expect: 100-continue" header in HTTP/1.1; see RFC 2616, Section 8.2.3.
+   * Unfortunately, there are software library bugs (e.g. Jetty 6 http server
+   * and Java 6 http client) that do not correctly implement "Expect:
+   * 100-continue". The two-step create/append is a temporary workaround for
+   * these library bugs.
+   */
+  private static HttpURLConnection twoStepWrite(HttpURLConnection conn,
+      final HttpOpParam.Op op) throws IOException {
+    //Step 1) Submit an HTTP request with neither auto-redirect nor data.
+    conn.setInstanceFollowRedirects(false);
+    conn.setDoOutput(false);
+    conn.connect();
+    validateResponse(HttpOpParam.TemporaryRedirectOp.valueOf(op), conn);
+    final String redirect = conn.getHeaderField("Location");
+    conn.disconnect();
+
+    //Step 2) Submit the data to the URL from the Location header.
+    conn = (HttpURLConnection)new URL(redirect).openConnection();
+    conn.setRequestMethod(op.getType().toString());
+    return conn;
+  }
 
   /**
    * Run a http operation.
@@ -626,8 +654,8 @@ public class WebHdfsFileSystem extends F
 
     final HttpOpParam.Op op = GetOpParam.Op.LISTSTATUS;
     final Map<?, ?> json  = run(op, f);
-    final Map<?, ?> rootmap = (Map<?, ?>)json.get(HdfsFileStatus.class.getSimpleName() + "es");
-    final Object[] array = (Object[])rootmap.get(HdfsFileStatus.class.getSimpleName());
+    final Map<?, ?> rootmap = (Map<?, ?>)json.get(FileStatus.class.getSimpleName() + "es");
+    final Object[] array = (Object[])rootmap.get(FileStatus.class.getSimpleName());
 
     //convert FileStatus
     final FileStatus[] statuses = new FileStatus[array.length];
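
To make the two-step write added above concrete, here is a minimal standalone
sketch of the same two-step create using only java.net (the namenode host,
port, and file path are hypothetical, and error handling is omitted):

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class TwoStepCreateSketch {
      public static void main(String[] args) throws Exception {
        // Step 1: request without data or auto-redirect; the namenode
        // answers 307 with the target datanode in the Location header.
        URL url = new URL(
            "http://namenode:50070/webhdfs/v1/tmp/a.txt?op=CREATE");
        HttpURLConnection conn = (HttpURLConnection)url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setInstanceFollowRedirects(false);
        conn.connect();
        final String redirect = conn.getHeaderField("Location");
        conn.disconnect();

        // Step 2: send the data to the datanode URL from the redirect.
        conn = (HttpURLConnection)new URL(redirect).openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        final OutputStream out = conn.getOutputStream();
        out.write("hello, webhdfs".getBytes("UTF-8"));
        out.close();
        System.out.println(conn.getResponseCode()); // expect 201 Created
        conn.disconnect();
      }
    }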

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1199396&r1=1199395&r2=1199396&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Tue Nov  8 19:25:57 2011
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
+import javax.ws.rs.core.Response;
+
+
 /** Http operation parameter. */
 public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
     extends EnumParam<E> {
@@ -46,6 +49,49 @@ public abstract class HttpOpParam<E exte
     public String toQueryString();
   }
 
+  /** Expects HTTP response 307 "Temporary Redirect". */
+  public static class TemporaryRedirectOp implements Op {
+    static final TemporaryRedirectOp CREATE = new TemporaryRedirectOp(PutOpParam.Op.CREATE);
+    static final TemporaryRedirectOp APPEND = new TemporaryRedirectOp(PostOpParam.Op.APPEND);
+    
+    /** Get an object for the given op. */
+    public static TemporaryRedirectOp valueOf(final Op op) {
+      if (op == CREATE.op) {
+        return CREATE;
+      } else if (op == APPEND.op) {
+        return APPEND;
+      }
+      throw new IllegalArgumentException(op + " not found.");
+    }
+
+    private final Op op;
+
+    private TemporaryRedirectOp(final Op op) {
+      this.op = op;
+    }
+
+    @Override
+    public Type getType() {
+      return op.getType();
+    }
+
+    @Override
+    public boolean getDoOutput() {
+      return op.getDoOutput();
+    }
+
+    /** Override the original expected response with "Temporary Redirect". */
+    @Override
+    public int getExpectedHttpResponseCode() {
+      return Response.Status.TEMPORARY_REDIRECT.getStatusCode();
+    }
+
+    @Override
+    public String toQueryString() {
+      return op.toQueryString();
+    }
+  }
+
   HttpOpParam(final Domain<E> domain, final E value) {
     super(domain, value);
   }
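
For context, the wrapper above delegates everything except the expected
response code, which lets validateResponse(..) accept 307 during step 1 of
the two-step write. A minimal sketch of its behavior (a hypothetical check,
not part of the patch; the printed values assume the Type enum renders "PUT"):

    package org.apache.hadoop.hdfs.web.resources;

    public class TemporaryRedirectOpSketch {
      public static void main(String[] args) {
        final HttpOpParam.Op op = PutOpParam.Op.CREATE;
        final HttpOpParam.Op r = HttpOpParam.TemporaryRedirectOp.valueOf(op);
        System.out.println(r.getType());                      // PUT, unchanged
        System.out.println(r.getDoOutput());                  // true, unchanged
        System.out.println(r.getExpectedHttpResponseCode());  // 307
      }
    }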