You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by tu...@apache.org on 2012/06/27 18:13:44 UTC

svn commit: r1354603 - in /hadoop/common/branches/branch-2/hadoop-hdfs-project: hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/ hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/ hadoop-hdfs-httpfs/src/main/java/org/a...

Author: tucu
Date: Wed Jun 27 16:13:35 2012
New Revision: 1354603

URL: http://svn.apache.org/viewvc?rev=1354603&view=rev
Log:
Merge -r 1354598:1354599 from trunk to branch. FIXES: HDFS-3113

Added:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
      - copied unchanged from r1354599, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
      - copied unchanged from r1354599, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Parameters.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
      - copied unchanged from r1354599, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ParametersProvider.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestParam.java
      - copied unchanged from r1354599, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/httpfs-log4j.properties
      - copied unchanged from r1354599, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/resources/httpfs-log4j.properties
Removed:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParams.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestBooleanParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestByteParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestEnumParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestIntegerParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestLongParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestShortParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/lib/wsrs/TestStringParam.java
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Wed Jun 27 16:13:35 2012
@@ -154,41 +154,33 @@ public class HttpFSFileSystem extends Fi
 
   public static final int HTTP_TEMPORARY_REDIRECT = 307;
 
+  private static final String HTTP_GET = "GET";
+  private static final String HTTP_PUT = "PUT";
+  private static final String HTTP_POST = "POST";
+  private static final String HTTP_DELETE = "DELETE";
 
-  /**
-   * Get operations.
-   */
-  public enum GetOpValues {
-    OPEN, GETFILESTATUS, LISTSTATUS, GETHOMEDIRECTORY, GETCONTENTSUMMARY, GETFILECHECKSUM,
-    GETDELEGATIONTOKEN, GETFILEBLOCKLOCATIONS, INSTRUMENTATION
-  }
+  public enum Operation {
+    OPEN(HTTP_GET), GETFILESTATUS(HTTP_GET), LISTSTATUS(HTTP_GET),
+    GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
+    GETFILECHECKSUM(HTTP_GET),  GETFILEBLOCKLOCATIONS(HTTP_GET),
+    INSTRUMENTATION(HTTP_GET),
+    APPEND(HTTP_POST),
+    CREATE(HTTP_PUT), MKDIRS(HTTP_PUT), RENAME(HTTP_PUT), SETOWNER(HTTP_PUT),
+    SETPERMISSION(HTTP_PUT), SETREPLICATION(HTTP_PUT), SETTIMES(HTTP_PUT),
+    DELETE(HTTP_DELETE);
 
-  /**
-   * Post operations.
-   */
-  public static enum PostOpValues {
-    APPEND
-  }
+    private String httpMethod;
 
-  /**
-   * Put operations.
-   */
-  public static enum PutOpValues {
-    CREATE, MKDIRS, RENAME, SETOWNER, SETPERMISSION, SETREPLICATION, SETTIMES,
-    RENEWDELEGATIONTOKEN, CANCELDELEGATIONTOKEN
-  }
+    Operation(String httpMethod) {
+      this.httpMethod = httpMethod;
+    }
+
+    public String getMethod() {
+      return httpMethod;
+    }
 
-  /**
-   * Delete operations.
-   */
-  public static enum DeleteOpValues {
-    DELETE
   }
 
-  private static final String HTTP_GET = "GET";
-  private static final String HTTP_PUT = "PUT";
-  private static final String HTTP_POST = "POST";
-  private static final String HTTP_DELETE = "DELETE";
 
   private AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
   private URI uri;
@@ -402,10 +394,12 @@ public class HttpFSFileSystem extends Fi
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.OPEN.toString());
-    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    params.put(OP_PARAM, Operation.OPEN.toString());
+    HttpURLConnection conn = getConnection(Operation.OPEN.getMethod(), params,
+                                           f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
-    return new FSDataInputStream(new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
+    return new FSDataInputStream(
+      new HttpFSDataInputStream(conn.getInputStream(), bufferSize));
   }
 
   /**
@@ -508,15 +502,18 @@ public class HttpFSFileSystem extends Fi
    * @see #setPermission(Path, FsPermission)
    */
   @Override
-  public FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, int bufferSize,
-                                   short replication, long blockSize, Progressable progress) throws IOException {
+  public FSDataOutputStream create(Path f, FsPermission permission,
+                                   boolean overwrite, int bufferSize,
+                                   short replication, long blockSize,
+                                   Progressable progress) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.CREATE.toString());
+    params.put(OP_PARAM, Operation.CREATE.toString());
     params.put(OVERWRITE_PARAM, Boolean.toString(overwrite));
     params.put(REPLICATION_PARAM, Short.toString(replication));
     params.put(BLOCKSIZE_PARAM, Long.toString(blockSize));
     params.put(PERMISSION_PARAM, permissionToString(permission));
-    return uploadData(HTTP_PUT, f, params, bufferSize, HttpURLConnection.HTTP_CREATED);
+    return uploadData(Operation.CREATE.getMethod(), f, params, bufferSize,
+                      HttpURLConnection.HTTP_CREATED);
   }
 
 
@@ -532,10 +529,12 @@ public class HttpFSFileSystem extends Fi
    * @throws IOException
    */
   @Override
-  public FSDataOutputStream append(Path f, int bufferSize, Progressable progress) throws IOException {
+  public FSDataOutputStream append(Path f, int bufferSize,
+                                   Progressable progress) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PostOpValues.APPEND.toString());
-    return uploadData(HTTP_POST, f, params, bufferSize, HttpURLConnection.HTTP_OK);
+    params.put(OP_PARAM, Operation.APPEND.toString());
+    return uploadData(Operation.APPEND.getMethod(), f, params, bufferSize,
+                      HttpURLConnection.HTTP_OK);
   }
 
   /**
@@ -545,9 +544,10 @@ public class HttpFSFileSystem extends Fi
   @Override
   public boolean rename(Path src, Path dst) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.RENAME.toString());
+    params.put(OP_PARAM, Operation.RENAME.toString());
     params.put(DESTINATION_PARAM, dst.toString());
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+    HttpURLConnection conn = getConnection(Operation.RENAME.getMethod(),
+                                           params, src, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     return (Boolean) json.get(RENAME_JSON);
@@ -580,9 +580,10 @@ public class HttpFSFileSystem extends Fi
   @Override
   public boolean delete(Path f, boolean recursive) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, DeleteOpValues.DELETE.toString());
+    params.put(OP_PARAM, Operation.DELETE.toString());
     params.put(RECURSIVE_PARAM, Boolean.toString(recursive));
-    HttpURLConnection conn = getConnection(HTTP_DELETE, params, f, true);
+    HttpURLConnection conn = getConnection(Operation.DELETE.getMethod(),
+                                           params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     return (Boolean) json.get(DELETE_JSON);
@@ -601,8 +602,9 @@ public class HttpFSFileSystem extends Fi
   @Override
   public FileStatus[] listStatus(Path f) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.LISTSTATUS.toString());
-    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    params.put(OP_PARAM, Operation.LISTSTATUS.toString());
+    HttpURLConnection conn = getConnection(Operation.LISTSTATUS.getMethod(),
+                                           params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     json = (JSONObject) json.get(FILE_STATUSES_JSON);
@@ -647,9 +649,10 @@ public class HttpFSFileSystem extends Fi
   @Override
   public boolean mkdirs(Path f, FsPermission permission) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.MKDIRS.toString());
+    params.put(OP_PARAM, Operation.MKDIRS.toString());
     params.put(PERMISSION_PARAM, permissionToString(permission));
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, f, true);
+    HttpURLConnection conn = getConnection(Operation.MKDIRS.getMethod(),
+                                           params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     return (Boolean) json.get(MKDIRS_JSON);
@@ -668,8 +671,9 @@ public class HttpFSFileSystem extends Fi
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.GETFILESTATUS.toString());
-    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    params.put(OP_PARAM, Operation.GETFILESTATUS.toString());
+    HttpURLConnection conn = getConnection(Operation.GETFILESTATUS.getMethod(),
+                                           params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     json = (JSONObject) json.get(FILE_STATUS_JSON);
@@ -684,9 +688,11 @@ public class HttpFSFileSystem extends Fi
   @Override
   public Path getHomeDirectory() {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.GETHOMEDIRECTORY.toString());
+    params.put(OP_PARAM, Operation.GETHOMEDIRECTORY.toString());
     try {
-      HttpURLConnection conn = getConnection(HTTP_GET, params, new Path(getUri().toString(), "/"), false);
+      HttpURLConnection conn =
+        getConnection(Operation.GETHOMEDIRECTORY.getMethod(), params,
+                      new Path(getUri().toString(), "/"), false);
       validateResponse(conn, HttpURLConnection.HTTP_OK);
       JSONObject json = (JSONObject) jsonParse(conn);
       return new Path((String) json.get(HOME_DIR_JSON));
@@ -704,12 +710,14 @@ public class HttpFSFileSystem extends Fi
    * @param groupname If it is null, the original groupname remains unchanged.
    */
   @Override
-  public void setOwner(Path p, String username, String groupname) throws IOException {
+  public void setOwner(Path p, String username, String groupname)
+    throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.SETOWNER.toString());
+    params.put(OP_PARAM, Operation.SETOWNER.toString());
     params.put(OWNER_PARAM, username);
     params.put(GROUP_PARAM, groupname);
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    HttpURLConnection conn = getConnection(Operation.SETOWNER.getMethod(),
+                                           params, p, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
   }
 
@@ -722,9 +730,9 @@ public class HttpFSFileSystem extends Fi
   @Override
   public void setPermission(Path p, FsPermission permission) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.SETPERMISSION.toString());
+    params.put(OP_PARAM, Operation.SETPERMISSION.toString());
     params.put(PERMISSION_PARAM, permissionToString(permission));
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    HttpURLConnection conn = getConnection(Operation.SETPERMISSION.getMethod(), params, p, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
   }
 
@@ -742,10 +750,11 @@ public class HttpFSFileSystem extends Fi
   @Override
   public void setTimes(Path p, long mtime, long atime) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.SETTIMES.toString());
+    params.put(OP_PARAM, Operation.SETTIMES.toString());
     params.put(MODIFICATION_TIME_PARAM, Long.toString(mtime));
     params.put(ACCESS_TIME_PARAM, Long.toString(atime));
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, p, true);
+    HttpURLConnection conn = getConnection(Operation.SETTIMES.getMethod(),
+                                           params, p, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
   }
 
@@ -761,11 +770,13 @@ public class HttpFSFileSystem extends Fi
    * @throws IOException
    */
   @Override
-  public boolean setReplication(Path src, short replication) throws IOException {
+  public boolean setReplication(Path src, short replication)
+    throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, PutOpValues.SETREPLICATION.toString());
+    params.put(OP_PARAM, Operation.SETREPLICATION.toString());
     params.put(REPLICATION_PARAM, Short.toString(replication));
-    HttpURLConnection conn = getConnection(HTTP_PUT, params, src, true);
+    HttpURLConnection conn =
+      getConnection(Operation.SETREPLICATION.getMethod(), params, src, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
     JSONObject json = (JSONObject) jsonParse(conn);
     return (Boolean) json.get(SET_REPLICATION_JSON);
@@ -814,10 +825,12 @@ public class HttpFSFileSystem extends Fi
   @Override
   public ContentSummary getContentSummary(Path f) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.GETCONTENTSUMMARY.toString());
-    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    params.put(OP_PARAM, Operation.GETCONTENTSUMMARY.toString());
+    HttpURLConnection conn =
+      getConnection(Operation.GETCONTENTSUMMARY.getMethod(), params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
-    JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
+    JSONObject json =
+      (JSONObject) ((JSONObject) jsonParse(conn)).get(CONTENT_SUMMARY_JSON);
     return new ContentSummary((Long) json.get(CONTENT_SUMMARY_LENGTH_JSON),
                               (Long) json.get(CONTENT_SUMMARY_FILE_COUNT_JSON),
                               (Long) json.get(CONTENT_SUMMARY_DIRECTORY_COUNT_JSON),
@@ -830,10 +843,12 @@ public class HttpFSFileSystem extends Fi
   @Override
   public FileChecksum getFileChecksum(Path f) throws IOException {
     Map<String, String> params = new HashMap<String, String>();
-    params.put(OP_PARAM, GetOpValues.GETFILECHECKSUM.toString());
-    HttpURLConnection conn = getConnection(HTTP_GET, params, f, true);
+    params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
+    HttpURLConnection conn =
+      getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
     validateResponse(conn, HttpURLConnection.HTTP_OK);
-    final JSONObject json = (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
+    final JSONObject json =
+      (JSONObject) ((JSONObject) jsonParse(conn)).get(FILE_CHECKSUM_JSON);
     return new FileChecksum() {
       @Override
       public String getAlgorithmName() {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/CheckUploadContentTypeFilter.java Wed Jun 27 16:13:35 2012
@@ -30,7 +30,6 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.io.IOException;
-import java.net.InetAddress;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -43,8 +42,8 @@ public class CheckUploadContentTypeFilte
   private static final Set<String> UPLOAD_OPERATIONS = new HashSet<String>();
 
   static {
-    UPLOAD_OPERATIONS.add(HttpFSFileSystem.PostOpValues.APPEND.toString());
-    UPLOAD_OPERATIONS.add(HttpFSFileSystem.PutOpValues.CREATE.toString());
+    UPLOAD_OPERATIONS.add(HttpFSFileSystem.Operation.APPEND.toString());
+    UPLOAD_OPERATIONS.add(HttpFSFileSystem.Operation.CREATE.toString());
   }
 
   /**
@@ -82,7 +81,7 @@ public class CheckUploadContentTypeFilte
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
       if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
-        if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParams.DataParam.NAME))) {
+        if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =
             HttpFSFileSystem.UPLOAD_CONTENT_TYPE.equalsIgnoreCase(contentType);

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSExceptionProvider.java Wed Jun 27 16:13:35 2012
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.http.server;
 
+import com.sun.jersey.api.container.ContainerException;
 import org.apache.hadoop.lib.service.FileSystemAccessException;
 import org.apache.hadoop.lib.wsrs.ExceptionProvider;
 import org.slf4j.Logger;
@@ -59,6 +60,9 @@ public class HttpFSExceptionProvider ext
     if (throwable instanceof FileSystemAccessException) {
       throwable = throwable.getCause();
     }
+    if (throwable instanceof ContainerException) {
+      throwable = throwable.getCause();
+    }
     if (throwable instanceof SecurityException) {
       status = Response.Status.UNAUTHORIZED;
     } else if (throwable instanceof FileNotFoundException) {
@@ -67,6 +71,8 @@ public class HttpFSExceptionProvider ext
       status = Response.Status.INTERNAL_SERVER_ERROR;
     } else if (throwable instanceof UnsupportedOperationException) {
       status = Response.Status.BAD_REQUEST;
+    } else if (throwable instanceof IllegalArgumentException) {
+      status = Response.Status.BAD_REQUEST;
     } else {
       status = Response.Status.INTERNAL_SERVER_ERROR;
     }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java Wed Jun 27 16:13:35 2012
@@ -21,26 +21,22 @@ package org.apache.hadoop.fs.http.server
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
-import org.apache.hadoop.fs.http.server.HttpFSParams.AccessTimeParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.BlockSizeParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.DataParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.DeleteOpParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.DeleteRecursiveParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.DoAsParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.FilterParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.FsPathParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.GetOpParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.GroupParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.LenParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.ModifiedTimeParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.OffsetParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.OverwriteParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.OwnerParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.PermissionParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.PostOpParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.PutOpParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.ReplicationParam;
-import org.apache.hadoop.fs.http.server.HttpFSParams.ToPathParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OperationParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.AccessTimeParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.BlockSizeParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.RecursiveParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DoAsParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.FilterParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.GroupParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.LenParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ModifiedTimeParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OffsetParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OverwriteParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OwnerParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.PermissionParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ReplicationParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DestinationParam;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.lib.service.FileSystemAccessException;
 import org.apache.hadoop.lib.service.Groups;
@@ -49,6 +45,7 @@ import org.apache.hadoop.lib.service.Pro
 import org.apache.hadoop.lib.servlet.FileSystemReleaseFilter;
 import org.apache.hadoop.lib.servlet.HostnameFilter;
 import org.apache.hadoop.lib.wsrs.InputStreamEntity;
+import org.apache.hadoop.lib.wsrs.Parameters;
 import org.apache.hadoop.security.authentication.server.AuthenticationToken;
 import org.json.simple.JSONObject;
 import org.slf4j.Logger;
@@ -57,7 +54,6 @@ import org.slf4j.MDC;
 
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
-import javax.ws.rs.DefaultValue;
 import javax.ws.rs.GET;
 import javax.ws.rs.POST;
 import javax.ws.rs.PUT;
@@ -90,39 +86,6 @@ public class HttpFSServer {
   private static Logger AUDIT_LOG = LoggerFactory.getLogger("httpfsaudit");
 
   /**
-   * Special binding for '/' as it is not handled by the wildcard binding.
-   *
-   * @param user principal making the request.
-   * @param op GET operation, default value is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#OPEN}.
-   * @param filter Glob filter, default value is none. Used only if the
-   * operation is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#LISTSTATUS}
-   * @param doAs user being impersonated, defualt value is none. It can be used
-   * only if the current user is a HttpFSServer proxyuser.
-   *
-   * @return the request response
-   *
-   * @throws IOException thrown if an IO error occurred. Thrown exceptions are
-   * handled by {@link HttpFSExceptionProvider}.
-   * @throws FileSystemAccessException thrown if a FileSystemAccess releated error occurred. Thrown
-   * exceptions are handled by {@link HttpFSExceptionProvider}.
-   */
-  @GET
-  @Path("/")
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response root(@Context Principal user,
-                       @QueryParam(GetOpParam.NAME) GetOpParam op,
-                       @QueryParam(FilterParam.NAME) @DefaultValue(FilterParam.DEFAULT) FilterParam filter,
-                       @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT) DoAsParam doAs)
-    throws IOException, FileSystemAccessException {
-    return get(user, new FsPathParam(""), op, new OffsetParam(OffsetParam.DEFAULT),
-               new LenParam(LenParam.DEFAULT), filter, doAs,
-               new OverwriteParam(OverwriteParam.DEFAULT),
-               new BlockSizeParam(BlockSizeParam.DEFAULT),
-               new PermissionParam(PermissionParam.DEFAULT),
-               new ReplicationParam(ReplicationParam.DEFAULT));
-  }
-
-  /**
    * Resolves the effective user that will be used to request a FileSystemAccess filesystem.
    * <p/>
    * If the doAs-user is NULL or the same as the user, it returns the user.
@@ -207,402 +170,405 @@ public class HttpFSServer {
     return fs;
   }
 
+  private void enforceRootPath(HttpFSFileSystem.Operation op, String path) {
+    if (!path.equals("/")) {
+      throw new UnsupportedOperationException(
+        MessageFormat.format("Operation [{0}], invalid path [{1}], must be '/'",
+                             op, path));
+    }
+  }
+
   /**
-   * Binding to handle all GET requests, supported operations are
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues}.
-   * <p/>
-   * The @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#INSTRUMENTATION} operation is available only
-   * to users that are in HttpFSServer's admin group (see {@link HttpFSServer}. It returns
-   * HttpFSServer instrumentation data. The specified path must be '/'.
+   * Special binding for '/' as it is not handled by the wildcard binding.
    *
-   * @param user principal making the request.
-   * @param path path for the GET request.
-   * @param op GET operation, default value is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#OPEN}.
-   * @param offset of the  file being fetch, used only with
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#OPEN} operations.
-   * @param len amounts of bytes, used only with @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#OPEN}
-   * operations.
-   * @param filter Glob filter, default value is none. Used only if the
-   * operation is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.GetOpValues#LISTSTATUS}
-   * @param doAs user being impersonated, defualt value is none. It can be used
-   * only if the current user is a HttpFSServer proxyuser.
-   * @param override default is true. Used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#CREATE} operations.
-   * @param blockSize block size to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#CREATE} operations.
-   * @param permission permission to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETPERMISSION}.
-   * @param replication replication factor to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETREPLICATION}.
+   * @param user the principal of the user making the request.
+   * @param op the HttpFS operation of the request.
+   * @param params the HttpFS parameters of the request.
    *
    * @return the request response.
    *
    * @throws IOException thrown if an IO error occurred. Thrown exceptions are
    * handled by {@link HttpFSExceptionProvider}.
-   * @throws FileSystemAccessException thrown if a FileSystemAccess releated error occurred. Thrown
-   * exceptions are handled by {@link HttpFSExceptionProvider}.
+   * @throws FileSystemAccessException thrown if a FileSystemAccess related
+   * error occurred. Thrown exceptions are handled by
+   * {@link HttpFSExceptionProvider}.
    */
   @GET
-  @Path("{path:.*}")
-  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
-  public Response get(@Context Principal user,
-                      @PathParam("path") @DefaultValue("") FsPathParam path,
-                      @QueryParam(GetOpParam.NAME) GetOpParam op,
-                      @QueryParam(OffsetParam.NAME) @DefaultValue(OffsetParam.DEFAULT) OffsetParam offset,
-                      @QueryParam(LenParam.NAME) @DefaultValue(LenParam.DEFAULT) LenParam len,
-                      @QueryParam(FilterParam.NAME) @DefaultValue(FilterParam.DEFAULT) FilterParam filter,
-                      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT) DoAsParam doAs,
-
-                      //these params are only for createHandle operation acceptance purposes
-                      @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT) OverwriteParam override,
-                      @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT) BlockSizeParam blockSize,
-                      @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
-                      PermissionParam permission,
-                      @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
-                      ReplicationParam replication
-  )
+  @Path("/")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getRoot(@Context Principal user,
+                          @QueryParam(OperationParam.NAME) OperationParam op,
+                          @Context Parameters params)
     throws IOException, FileSystemAccessException {
-    Response response = null;
-    if (op == null) {
-      throw new UnsupportedOperationException(MessageFormat.format("Missing [{0}] parameter", GetOpParam.NAME));
-    } else {
-      path.makeAbsolute();
-      MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
-      switch (op.value()) {
-        case OPEN: {
-          //Invoking the command directly using an unmanaged FileSystem that is released by the
-          //FileSystemReleaseFilter
-          FSOperations.FSOpen command = new FSOperations.FSOpen(path.value());
-          FileSystem fs = createFileSystem(user, doAs.value());
-          InputStream is = command.execute(fs);
-          AUDIT_LOG.info("[{}] offset [{}] len [{}]", new Object[]{path, offset, len});
-          InputStreamEntity entity = new InputStreamEntity(is, offset.value(), len.value());
-          response = Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM).build();
-          break;
-        }
-        case GETFILESTATUS: {
-          FSOperations.FSFileStatus command = new FSOperations.FSFileStatus(path.value());
-          Map json = fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("[{}]", path);
-          response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
-          break;
-        }
-        case LISTSTATUS: {
-          FSOperations.FSListStatus command = new FSOperations.FSListStatus(path.value(), filter.value());
-          Map json = fsExecute(user, doAs.value(), command);
-          if (filter.value() == null) {
-            AUDIT_LOG.info("[{}]", path);
-          } else {
-            AUDIT_LOG.info("[{}] filter [{}]", path, filter.value());
-          }
-          response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
-          break;
-        }
-        case GETHOMEDIRECTORY: {
-          FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
-          JSONObject json = fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("");
-          response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
-          break;
-        }
-        case INSTRUMENTATION: {
-          if (!path.value().equals("/")) {
-            throw new UnsupportedOperationException(
-              MessageFormat.format("Invalid path for {0}={1}, must be '/'",
-                                   GetOpParam.NAME, HttpFSFileSystem.GetOpValues.INSTRUMENTATION));
-          }
-          Groups groups = HttpFSServerWebApp.get().get(Groups.class);
-          List<String> userGroups = groups.getGroups(user.getName());
-          if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
-            throw new AccessControlException("User not in HttpFSServer admin group");
-          }
-          Instrumentation instrumentation = HttpFSServerWebApp.get().get(Instrumentation.class);
-          Map snapshot = instrumentation.getSnapshot();
-          response = Response.ok(snapshot).build();
-          break;
-        }
-        case GETCONTENTSUMMARY: {
-          FSOperations.FSContentSummary command = new FSOperations.FSContentSummary(path.value());
-          Map json = fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("[{}]", path);
-          response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
-          break;
-        }
-        case GETFILECHECKSUM: {
-          FSOperations.FSFileChecksum command = new FSOperations.FSFileChecksum(path.value());
-          Map json = fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("[{}]", path);
-          response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
-          break;
-        }
-        case GETDELEGATIONTOKEN: {
-          response = Response.status(Response.Status.BAD_REQUEST).build();
-          break;
-        }
-        case GETFILEBLOCKLOCATIONS: {
-          response = Response.status(Response.Status.BAD_REQUEST).build();
-          break;
-        }
-      }
-      return response;
-    }
+    return get(user, "", op, params);
+  }
+
+  private String makeAbsolute(String path) {
+    return "/" + ((path != null) ? path : "");
   }
 
   /**
-   * Creates the URL for an upload operation (create or append).
+   * Binding to handle GET requests.
    *
-   * @param uriInfo uri info of the request.
-   * @param uploadOperation operation for the upload URL.
+   * @param user the principal of the user making the request.
+   * @param path the path for operation.
+   * @param op the HttpFS operation of the request.
+   * @param params the HttpFS parameters of the request.
    *
-   * @return the URI for uploading data.
+   * @return the request response.
+   *
+   * @throws IOException thrown if an IO error occurred. Thrown exceptions are
+   * handled by {@link HttpFSExceptionProvider}.
+   * @throws FileSystemAccessException thrown if a FileSystemAccess related
+   * error occurred. Thrown exceptions are handled by
+   * {@link HttpFSExceptionProvider}.
    */
-  protected URI createUploadRedirectionURL(UriInfo uriInfo, Enum<?> uploadOperation) {
-    UriBuilder uriBuilder = uriInfo.getRequestUriBuilder();
-    uriBuilder = uriBuilder.replaceQueryParam(PutOpParam.NAME, uploadOperation).
-      queryParam(DataParam.NAME, Boolean.TRUE);
-    return uriBuilder.build(null);
+  @GET
+  @Path("{path:.*}")
+  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
+  public Response get(@Context Principal user,
+                      @PathParam("path") String path,
+                      @QueryParam(OperationParam.NAME) OperationParam op,
+                      @Context Parameters params)
+    throws IOException, FileSystemAccessException {
+    Response response;
+    path = makeAbsolute(path);
+    MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
+    String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
+    switch (op.value()) {
+      case OPEN: {
+        //Invoking the command directly using an unmanaged FileSystem that is
+        // released by the FileSystemReleaseFilter
+        FSOperations.FSOpen command = new FSOperations.FSOpen(path);
+        FileSystem fs = createFileSystem(user, doAs);
+        InputStream is = command.execute(fs);
+        Long offset = params.get(OffsetParam.NAME, OffsetParam.class);
+        Long len = params.get(LenParam.NAME, LenParam.class);
+        AUDIT_LOG.info("[{}] offset [{}] len [{}]",
+                       new Object[]{path, offset, len});
+        InputStreamEntity entity = new InputStreamEntity(is, offset, len);
+        response =
+          Response.ok(entity).type(MediaType.APPLICATION_OCTET_STREAM).build();
+        break;
+      }
+      case GETFILESTATUS: {
+        FSOperations.FSFileStatus command =
+          new FSOperations.FSFileStatus(path);
+        Map json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}]", path);
+        response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
+        break;
+      }
+      case LISTSTATUS: {
+        String filter = params.get(FilterParam.NAME, FilterParam.class);
+        FSOperations.FSListStatus command = new FSOperations.FSListStatus(
+          path, filter);
+        Map json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] filter [{}]", path,
+                       (filter != null) ? filter : "-");
+        response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
+        break;
+      }
+      case GETHOMEDIRECTORY: {
+        enforceRootPath(op.value(), path);
+        FSOperations.FSHomeDir command = new FSOperations.FSHomeDir();
+        JSONObject json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("");
+        response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
+        break;
+      }
+      case INSTRUMENTATION: {
+        enforceRootPath(op.value(), path);
+        Groups groups = HttpFSServerWebApp.get().get(Groups.class);
+        List<String> userGroups = groups.getGroups(user.getName());
+        if (!userGroups.contains(HttpFSServerWebApp.get().getAdminGroup())) {
+          throw new AccessControlException(
+            "User not in HttpFSServer admin group");
+        }
+        Instrumentation instrumentation =
+          HttpFSServerWebApp.get().get(Instrumentation.class);
+        Map snapshot = instrumentation.getSnapshot();
+        response = Response.ok(snapshot).build();
+        break;
+      }
+      case GETCONTENTSUMMARY: {
+        FSOperations.FSContentSummary command =
+          new FSOperations.FSContentSummary(path);
+        Map json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}]", path);
+        response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
+        break;
+      }
+      case GETFILECHECKSUM: {
+        FSOperations.FSFileChecksum command =
+          new FSOperations.FSFileChecksum(path);
+        Map json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}]", path);
+        response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
+        break;
+      }
+      case GETFILEBLOCKLOCATIONS: {
+        response = Response.status(Response.Status.BAD_REQUEST).build();
+        break;
+      }
+      default: {
+        throw new IOException(
+          MessageFormat.format("Invalid HTTP GET operation [{0}]",
+                               op.value()));
+      }
+    }
+    return response;
   }
 
+
   /**
-   * Binding to handle all DELETE requests.
+   * Binding to handle DELETE requests.
    *
-   * @param user principal making the request.
-   * @param path path for the DELETE request.
-   * @param op DELETE operation, default value is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.DeleteOpValues#DELETE}.
-   * @param recursive indicates if the delete is recursive, default is <code>false</code>
-   * @param doAs user being impersonated, defualt value is none. It can be used
-   * only if the current user is a HttpFSServer proxyuser.
+   * @param user the principal of the user making the request.
+   * @param path the path for operation.
+   * @param op the HttpFS operation of the request.
+   * @param params the HttpFS parameters of the request.
    *
    * @return the request response.
    *
    * @throws IOException thrown if an IO error occurred. Thrown exceptions are
    * handled by {@link HttpFSExceptionProvider}.
-   * @throws FileSystemAccessException thrown if a FileSystemAccess releated error occurred. Thrown
-   * exceptions are handled by {@link HttpFSExceptionProvider}.
+   * @throws FileSystemAccessException thrown if a FileSystemAccess related
+   * error occurred. Thrown exceptions are handled by
+   * {@link HttpFSExceptionProvider}.
    */
   @DELETE
   @Path("{path:.*}")
   @Produces(MediaType.APPLICATION_JSON)
   public Response delete(@Context Principal user,
-                         @PathParam("path") FsPathParam path,
-                         @QueryParam(DeleteOpParam.NAME) DeleteOpParam op,
-                         @QueryParam(DeleteRecursiveParam.NAME) @DefaultValue(DeleteRecursiveParam.DEFAULT)
-                         DeleteRecursiveParam recursive,
-                         @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT) DoAsParam doAs)
+                      @PathParam("path") String path,
+                      @QueryParam(OperationParam.NAME) OperationParam op,
+                      @Context Parameters params)
     throws IOException, FileSystemAccessException {
-    Response response = null;
-    if (op == null) {
-      throw new UnsupportedOperationException(MessageFormat.format("Missing [{0}] parameter", DeleteOpParam.NAME));
-    }
+    Response response;
+    path = makeAbsolute(path);
+    MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
+    String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
     switch (op.value()) {
       case DELETE: {
-        path.makeAbsolute();
-        MDC.put(HttpFSFileSystem.OP_PARAM, "DELETE");
+        Boolean recursive =
+          params.get(RecursiveParam.NAME,  RecursiveParam.class);
         AUDIT_LOG.info("[{}] recursive [{}]", path, recursive);
-        FSOperations.FSDelete command = new FSOperations.FSDelete(path.value(), recursive.value());
-        JSONObject json = fsExecute(user, doAs.value(), command);
+        FSOperations.FSDelete command =
+          new FSOperations.FSDelete(path, recursive);
+        JSONObject json = fsExecute(user, doAs, command);
         response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
         break;
       }
+      default: {
+        throw new IOException(
+          MessageFormat.format("Invalid HTTP DELETE operation [{0}]",
+                               op.value()));
+      }
     }
     return response;
   }
 
+  /**
+   * Binding to handle POST requests.
+   *
+   * @param is the input stream for the request payload.
+   * @param user the principal of the user making the request.
+   * @param uriInfo the UriInfo of the request.
+   * @param path the path for operation.
+   * @param op the HttpFS operation of the request.
+   * @param params the HttpFS parameters of the request.
+   *
+   * @return the request response.
+   *
+   * @throws IOException thrown if an IO error occurred. Thrown exceptions are
+   * handled by {@link HttpFSExceptionProvider}.
+   * @throws FileSystemAccessException thrown if a FileSystemAccess related
+   * error occurred. Thrown exceptions are handled by
+   * {@link HttpFSExceptionProvider}.
+   */
+  @POST
+  @Path("{path:.*}")
+  @Consumes({"*/*"})
+  @Produces({MediaType.APPLICATION_JSON})
+  public Response post(InputStream is,
+                       @Context Principal user,
+                       @Context UriInfo uriInfo,
+                       @PathParam("path") String path,
+                       @QueryParam(OperationParam.NAME) OperationParam op,
+                       @Context Parameters params)
+    throws IOException, FileSystemAccessException {
+    Response response;
+    path = makeAbsolute(path);
+    MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
+    String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
+    switch (op.value()) {
+      case APPEND: {
+        boolean hasData = params.get(DataParam.NAME, DataParam.class);
+        if (!hasData) {
+          response = Response.temporaryRedirect(
+            createUploadRedirectionURL(uriInfo,
+              HttpFSFileSystem.Operation.APPEND)).build();
+        } else {
+          FSOperations.FSAppend command =
+            new FSOperations.FSAppend(is, path);
+          fsExecute(user, doAs, command);
+          AUDIT_LOG.info("[{}]", path);
+          response = Response.ok().type(MediaType.APPLICATION_JSON).build();
+        }
+        break;
+      }
+      default: {
+        throw new IOException(
+          MessageFormat.format("Invalid HTTP POST operation [{0}]",
+                               op.value()));
+      }
+    }
+    return response;
+  }
 
   /**
-   * Binding to handle all PUT requests, supported operations are
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues}.
+   * Creates the URL for an upload operation (create or append).
    *
-   * @param is request input stream, used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PostOpValues#APPEND} operations.
-   * @param user principal making the request.
-   * @param uriInfo the request uriInfo.
-   * @param path path for the PUT request.
-   * @param op PUT operation, no default value.
-   * @param toPath new path, used only for
-   * {@link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#RENAME} operations.
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETTIMES}.
-   * @param owner owner to set, used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETOWNER} operations.
-   * @param group group to set, used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETOWNER} operations.
-   * @param override default is true. Used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#CREATE} operations.
-   * @param blockSize block size to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#CREATE} operations.
-   * @param permission permission to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETPERMISSION}.
-   * @param replication replication factor to set, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETREPLICATION}.
-   * @param modifiedTime modified time, in seconds since EPOC, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETTIMES}.
-   * @param accessTime accessed time, in seconds since EPOC, used only by
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PutOpValues#SETTIMES}.
-   * @param hasData indicates if the append request is uploading data or not
-   * (just getting the handle).
-   * @param doAs user being impersonated, defualt value is none. It can be used
-   * only if the current user is a HttpFSServer proxyuser.
+   * @param uriInfo uri info of the request.
+   * @param uploadOperation operation for the upload URL.
+   *
+   * @return the URI for uploading data.
+   */
+  protected URI createUploadRedirectionURL(UriInfo uriInfo, Enum<?> uploadOperation) {
+    UriBuilder uriBuilder = uriInfo.getRequestUriBuilder();
+    uriBuilder = uriBuilder.replaceQueryParam(OperationParam.NAME, uploadOperation).
+      queryParam(DataParam.NAME, Boolean.TRUE);
+    return uriBuilder.build(null);
+  }
+
+
+  /**
+   * Binding to handle PUT requests.
+   *
+   * @param is the input stream for the request payload.
+   * @param user the principal of the user making the request.
+   * @param uriInfo the UriInfo of the request.
+   * @param path the path for operation.
+   * @param op the HttpFS operation of the request.
+   * @param params the HttpFS parameters of the request.
    *
    * @return the request response.
    *
    * @throws IOException thrown if an IO error occurred. Thrown exceptions are
    * handled by {@link HttpFSExceptionProvider}.
-   * @throws FileSystemAccessException thrown if a FileSystemAccess releated error occurred. Thrown
-   * exceptions are handled by {@link HttpFSExceptionProvider}.
+   * @throws FileSystemAccessException thrown if a FileSystemAccess related
+   * error occurred. Thrown exceptions are handled by
+   * {@link HttpFSExceptionProvider}.
    */
   @PUT
   @Path("{path:.*}")
   @Consumes({"*/*"})
   @Produces({MediaType.APPLICATION_JSON})
   public Response put(InputStream is,
-                      @Context Principal user,
-                      @Context UriInfo uriInfo,
-                      @PathParam("path") FsPathParam path,
-                      @QueryParam(PutOpParam.NAME) PutOpParam op,
-                      @QueryParam(ToPathParam.NAME) @DefaultValue(ToPathParam.DEFAULT) ToPathParam toPath,
-                      @QueryParam(OwnerParam.NAME) @DefaultValue(OwnerParam.DEFAULT) OwnerParam owner,
-                      @QueryParam(GroupParam.NAME) @DefaultValue(GroupParam.DEFAULT) GroupParam group,
-                      @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT) OverwriteParam override,
-                      @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT) BlockSizeParam blockSize,
-                      @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
-                      PermissionParam permission,
-                      @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
-                      ReplicationParam replication,
-                      @QueryParam(ModifiedTimeParam.NAME) @DefaultValue(ModifiedTimeParam.DEFAULT)
-                      ModifiedTimeParam modifiedTime,
-                      @QueryParam(AccessTimeParam.NAME) @DefaultValue(AccessTimeParam.DEFAULT)
-                      AccessTimeParam accessTime,
-                      @QueryParam(DataParam.NAME) @DefaultValue(DataParam.DEFAULT) DataParam hasData,
-                      @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT) DoAsParam doAs)
+                       @Context Principal user,
+                       @Context UriInfo uriInfo,
+                       @PathParam("path") String path,
+                       @QueryParam(OperationParam.NAME) OperationParam op,
+                       @Context Parameters params)
     throws IOException, FileSystemAccessException {
-    Response response = null;
-    if (op == null) {
-      throw new UnsupportedOperationException(MessageFormat.format("Missing [{0}] parameter", PutOpParam.NAME));
-    }
-    path.makeAbsolute();
+    Response response;
+    path = makeAbsolute(path);
     MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
+    String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
     switch (op.value()) {
       case CREATE: {
-        if (!hasData.value()) {
+        boolean hasData = params.get(DataParam.NAME, DataParam.class);
+        if (!hasData) {
           response = Response.temporaryRedirect(
-            createUploadRedirectionURL(uriInfo, HttpFSFileSystem.PutOpValues.CREATE)).build();
+            createUploadRedirectionURL(uriInfo,
+              HttpFSFileSystem.Operation.CREATE)).build();
         } else {
-          FSOperations.FSCreate
-            command = new FSOperations.FSCreate(is, path.value(), permission.value(), override.value(),
-                                                replication.value(), blockSize.value());
-          fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("[{}] permission [{}] override [{}] replication [{}] blockSize [{}]",
-                         new Object[]{path, permission, override, replication, blockSize});
+          String permission = params.get(PermissionParam.NAME,
+                                         PermissionParam.class);
+          boolean override = params.get(OverwriteParam.NAME,
+                                        OverwriteParam.class);
+          short replication = params.get(ReplicationParam.NAME,
+                                         ReplicationParam.class);
+          long blockSize = params.get(BlockSizeParam.NAME,
+                                      BlockSizeParam.class);
+          FSOperations.FSCreate command =
+            new FSOperations.FSCreate(is, path, permission, override,
+                                      replication, blockSize);
+          fsExecute(user, doAs, command);
+          AUDIT_LOG.info(
+            "[{}] permission [{}] override [{}] replication [{}] blockSize [{}]",
+            new Object[]{path, permission, override, replication, blockSize});
           response = Response.status(Response.Status.CREATED).build();
         }
         break;
       }
       case MKDIRS: {
-        FSOperations.FSMkdirs command = new FSOperations.FSMkdirs(path.value(), permission.value());
-        JSONObject json = fsExecute(user, doAs.value(), command);
-        AUDIT_LOG.info("[{}] permission [{}]", path, permission.value());
+        String permission = params.get(PermissionParam.NAME,
+                                       PermissionParam.class);
+        FSOperations.FSMkdirs command =
+          new FSOperations.FSMkdirs(path, permission);
+        JSONObject json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] permission [{}]", path, permission);
         response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
         break;
       }
       case RENAME: {
-        FSOperations.FSRename command = new FSOperations.FSRename(path.value(), toPath.value());
-        JSONObject json = fsExecute(user, doAs.value(), command);
+        String toPath = params.get(DestinationParam.NAME, DestinationParam.class);
+        FSOperations.FSRename command =
+          new FSOperations.FSRename(path, toPath);
+        JSONObject json = fsExecute(user, doAs, command);
         AUDIT_LOG.info("[{}] to [{}]", path, toPath);
         response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
         break;
       }
       case SETOWNER: {
-        FSOperations.FSSetOwner command = new FSOperations.FSSetOwner(path.value(), owner.value(), group.value());
-        fsExecute(user, doAs.value(), command);
-        AUDIT_LOG.info("[{}] to (O/G)[{}]", path, owner.value() + ":" + group.value());
+        String owner = params.get(OwnerParam.NAME, OwnerParam.class);
+        String group = params.get(GroupParam.NAME, GroupParam.class);
+        FSOperations.FSSetOwner command =
+          new FSOperations.FSSetOwner(path, owner, group);
+        fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] to (O/G)[{}]", path, owner + ":" + group);
         response = Response.ok().build();
         break;
       }
       case SETPERMISSION: {
-        FSOperations.FSSetPermission command = new FSOperations.FSSetPermission(path.value(), permission.value());
-        fsExecute(user, doAs.value(), command);
-        AUDIT_LOG.info("[{}] to [{}]", path, permission.value());
+        String permission = params.get(PermissionParam.NAME,
+                                       PermissionParam.class);
+        FSOperations.FSSetPermission command =
+          new FSOperations.FSSetPermission(path, permission);
+        fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] to [{}]", path, permission);
         response = Response.ok().build();
         break;
       }
       case SETREPLICATION: {
-        FSOperations.FSSetReplication command = new FSOperations.FSSetReplication(path.value(), replication.value());
-        JSONObject json = fsExecute(user, doAs.value(), command);
-        AUDIT_LOG.info("[{}] to [{}]", path, replication.value());
+        short replication = params.get(ReplicationParam.NAME,
+                                       ReplicationParam.class);
+        FSOperations.FSSetReplication command =
+          new FSOperations.FSSetReplication(path, replication);
+        JSONObject json = fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] to [{}]", path, replication);
         response = Response.ok(json).build();
         break;
       }
       case SETTIMES: {
-        FSOperations.FSSetTimes
-          command = new FSOperations.FSSetTimes(path.value(), modifiedTime.value(), accessTime.value());
-        fsExecute(user, doAs.value(), command);
-        AUDIT_LOG.info("[{}] to (M/A)[{}]", path, modifiedTime.value() + ":" + accessTime.value());
+        long modifiedTime = params.get(ModifiedTimeParam.NAME,
+                                       ModifiedTimeParam.class);
+        long accessTime = params.get(AccessTimeParam.NAME,
+                                     AccessTimeParam.class);
+        FSOperations.FSSetTimes command =
+          new FSOperations.FSSetTimes(path, modifiedTime, accessTime);
+        fsExecute(user, doAs, command);
+        AUDIT_LOG.info("[{}] to (M/A)[{}]", path,
+                       modifiedTime + ":" + accessTime);
         response = Response.ok().build();
         break;
       }
-      case RENEWDELEGATIONTOKEN: {
-        response = Response.status(Response.Status.BAD_REQUEST).build();
-        break;
-      }
-      case CANCELDELEGATIONTOKEN: {
-        response = Response.status(Response.Status.BAD_REQUEST).build();
-        break;
-      }
-    }
-    return response;
-  }
-
-  /**
-   * Binding to handle all OPST requests, supported operations are
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PostOpValues}.
-   *
-   * @param is request input stream, used only for
-   * @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PostOpValues#APPEND} operations.
-   * @param user principal making the request.
-   * @param uriInfo the request uriInfo.
-   * @param path path for the POST request.
-   * @param op POST operation, default is @link org.apache.hadoop.fs.http.client.HttpFSFileSystem.PostOpValues#APPEND}.
-   * @param hasData indicates if the append request is uploading data or not (just getting the handle).
-   * @param doAs user being impersonated, defualt value is none. It can be used
-   * only if the current user is a HttpFSServer proxyuser.
-   *
-   * @return the request response.
-   *
-   * @throws IOException thrown if an IO error occurred. Thrown exceptions are
-   * handled by {@link HttpFSExceptionProvider}.
-   * @throws FileSystemAccessException thrown if a FileSystemAccess releated error occurred. Thrown
-   * exceptions are handled by {@link HttpFSExceptionProvider}.
-   */
-  @POST
-  @Path("{path:.*}")
-  @Consumes({"*/*"})
-  @Produces({MediaType.APPLICATION_JSON})
-  public Response post(InputStream is,
-                       @Context Principal user,
-                       @Context UriInfo uriInfo,
-                       @PathParam("path") FsPathParam path,
-                       @QueryParam(PostOpParam.NAME) PostOpParam op,
-                       @QueryParam(DataParam.NAME) @DefaultValue(DataParam.DEFAULT) DataParam hasData,
-                       @QueryParam(DoAsParam.NAME) @DefaultValue(DoAsParam.DEFAULT) DoAsParam doAs)
-    throws IOException, FileSystemAccessException {
-    Response response = null;
-    if (op == null) {
-      throw new UnsupportedOperationException(MessageFormat.format("Missing [{0}] parameter", PostOpParam.NAME));
-    }
-    path.makeAbsolute();
-    MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
-    switch (op.value()) {
-      case APPEND: {
-        if (!hasData.value()) {
-          response = Response.temporaryRedirect(
-            createUploadRedirectionURL(uriInfo, HttpFSFileSystem.PostOpValues.APPEND)).build();
-        } else {
-          FSOperations.FSAppend command = new FSOperations.FSAppend(is, path.value());
-          fsExecute(user, doAs.value(), command);
-          AUDIT_LOG.info("[{}]", path);
-          response = Response.ok().type(MediaType.APPLICATION_JSON).build();
-        }
-        break;
+      default: {
+        throw new IOException(
+          MessageFormat.format("Invalid HTTP PUT operation [{0}]",
+                               op.value()));
       }
     }
     return response;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/BooleanParam.java Wed Jun 27 16:13:35 2012
@@ -22,15 +22,14 @@ import java.text.MessageFormat;
 
 public abstract class BooleanParam extends Param<Boolean> {
 
-  public BooleanParam(String name, String str) {
-    value = parseParam(name, str);
+  public BooleanParam(String name, Boolean defaultValue) {
+    super(name, defaultValue);
   }
 
   protected Boolean parse(String str) throws Exception {
     if (str.equalsIgnoreCase("true")) {
       return true;
-    }
-    if (str.equalsIgnoreCase("false")) {
+    } else if (str.equalsIgnoreCase("false")) {
       return false;
     }
     throw new IllegalArgumentException(MessageFormat.format("Invalid value [{0}], must be a boolean", str));

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ByteParam.java Wed Jun 27 16:13:35 2012
@@ -20,8 +20,8 @@ package org.apache.hadoop.lib.wsrs;
 
 public abstract class ByteParam extends Param<Byte> {
 
-  public ByteParam(String name, String str) {
-    value = parseParam(name, str);
+  public ByteParam(String name, Byte defaultValue) {
+    super(name, defaultValue);
   }
 
   protected Byte parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/EnumParam.java Wed Jun 27 16:13:35 2012
@@ -25,9 +25,9 @@ import java.util.Arrays;
 public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
   Class<E> klass;
 
-  public EnumParam(String label, String str, Class<E> e) {
+  public EnumParam(String name, Class<E> e, E defaultValue) {
+    super(name, defaultValue);
     klass = e;
-    value = parseParam(label, str);
   }
 
   protected E parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/IntegerParam.java Wed Jun 27 16:13:35 2012
@@ -20,8 +20,8 @@ package org.apache.hadoop.lib.wsrs;
 
 public abstract class IntegerParam extends Param<Integer> {
 
-  public IntegerParam(String name, String str) {
-    value = parseParam(name, str);
+  public IntegerParam(String name, Integer defaultValue) {
+    super(name, defaultValue);
   }
 
   protected Integer parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/LongParam.java Wed Jun 27 16:13:35 2012
@@ -20,8 +20,8 @@ package org.apache.hadoop.lib.wsrs;
 
 public abstract class LongParam extends Param<Long> {
 
-  public LongParam(String name, String str) {
-    value = parseParam(name, str);
+  public LongParam(String name, Long defaultValue) {
+    super(name, defaultValue);
   }
 
   protected Long parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/Param.java Wed Jun 27 16:13:35 2012
@@ -23,32 +23,39 @@ import org.apache.hadoop.lib.util.Check;
 import java.text.MessageFormat;
 
 public abstract class Param<T> {
+  private String name;
   protected T value;
 
-  public T parseParam(String name, String str) {
-    Check.notNull(name, "name");
+  public Param(String name, T defaultValue) {
+    this.name = name;
+    this.value = defaultValue;
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public T parseParam(String str) {
     try {
-      return (str != null && str.trim().length() > 0) ? parse(str) : null;
+      value = (str != null && str.trim().length() > 0) ? parse(str) : value;
     } catch (Exception ex) {
       throw new IllegalArgumentException(
         MessageFormat.format("Parameter [{0}], invalid value [{1}], value must be [{2}]",
                              name, str, getDomain()));
     }
+    return value;
   }
 
   public T value() {
     return value;
   }
 
-  protected void setValue(T value) {
-    this.value = value;
-  }
-
   protected abstract String getDomain();
 
   protected abstract T parse(String str) throws Exception;
 
   public String toString() {
-    return value.toString();
+    return (value != null) ? value.toString() : "NULL";
   }
+
 }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/ShortParam.java Wed Jun 27 16:13:35 2012
@@ -20,8 +20,8 @@ package org.apache.hadoop.lib.wsrs;
 
 public abstract class ShortParam extends Param<Short> {
 
-  public ShortParam(String name, String str) {
-    value = parseParam(name, str);
+  public ShortParam(String name, Short defaultValue) {
+    super(name, defaultValue);
   }
 
   protected Short parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/wsrs/StringParam.java Wed Jun 27 16:13:35 2012
@@ -15,42 +15,38 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.lib.wsrs;
 
-import org.apache.hadoop.lib.util.Check;
-
 import java.text.MessageFormat;
 import java.util.regex.Pattern;
 
 public abstract class StringParam extends Param<String> {
   private Pattern pattern;
 
-  public StringParam(String name, String str) {
-    this(name, str, null);
+  public StringParam(String name, String defaultValue) {
+    this(name, defaultValue, null);
   }
 
-  public StringParam(String name, String str, Pattern pattern) {
+  public StringParam(String name, String defaultValue, Pattern pattern) {
+    super(name, defaultValue);
     this.pattern = pattern;
-    value = parseParam(name, str);
+    parseParam(defaultValue);
   }
 
-  public String parseParam(String name, String str) {
-    String ret = null;
-    Check.notNull(name, "name");
+  public String parseParam(String str) {
     try {
       if (str != null) {
         str = str.trim();
         if (str.length() > 0) {
-          return parse(str);
+          value = parse(str);
         }
       }
     } catch (Exception ex) {
       throw new IllegalArgumentException(
         MessageFormat.format("Parameter [{0}], invalid value [{1}], value must be [{2}]",
-                             name, str, getDomain()));
+                             getName(), str, getDomain()));
     }
-    return ret;
+    return value;
   }
 
   protected String parse(String str) throws Exception {

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystem.java Wed Jun 27 16:13:35 2012
@@ -475,6 +475,7 @@ public class TestHttpFSFileSystem extend
       ops[i] = new Object[]{Operation.values()[i]};
     }
     return Arrays.asList(ops);
+//    return Arrays.asList(new Object[][]{ new Object[]{Operation.CREATE}});
   }
 
   private Operation operation;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestCheckUploadContentTypeFilter.java Wed Jun 27 16:13:35 2012
@@ -31,34 +31,34 @@ public class TestCheckUploadContentTypeF
 
   @Test
   public void putUpload() throws Exception {
-    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "application/octet-stream", true, false);
+    test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "application/octet-stream", true, false);
   }
 
   @Test
   public void postUpload() throws Exception {
-    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "APPLICATION/OCTET-STREAM", true, false);
+    test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "APPLICATION/OCTET-STREAM", true, false);
   }
 
   @Test
   public void putUploadWrong() throws Exception {
-    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", false, false);
-    test("PUT", HttpFSFileSystem.PutOpValues.CREATE.toString(), "plain/text", true, true);
+    test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "plain/text", false, false);
+    test("PUT", HttpFSFileSystem.Operation.CREATE.toString(), "plain/text", true, true);
   }
 
   @Test
   public void postUploadWrong() throws Exception {
-    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", false, false);
-    test("POST", HttpFSFileSystem.PostOpValues.APPEND.toString(), "plain/text", true, true);
+    test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "plain/text", false, false);
+    test("POST", HttpFSFileSystem.Operation.APPEND.toString(), "plain/text", true, true);
   }
 
   @Test
   public void getOther() throws Exception {
-    test("GET", HttpFSFileSystem.GetOpValues.GETHOMEDIRECTORY.toString(), "plain/text", false, false);
+    test("GET", HttpFSFileSystem.Operation.GETHOMEDIRECTORY.toString(), "plain/text", false, false);
   }
 
   @Test
   public void putOther() throws Exception {
-    test("PUT", HttpFSFileSystem.PutOpValues.MKDIRS.toString(), "plain/text", false, false);
+    test("PUT", HttpFSFileSystem.Operation.MKDIRS.toString(), "plain/text", false, false);
   }
 
   private void test(String method, String operation, String contentType,
@@ -68,7 +68,7 @@ public class TestCheckUploadContentTypeF
     Mockito.reset(request);
     Mockito.when(request.getMethod()).thenReturn(method);
     Mockito.when(request.getParameter(HttpFSFileSystem.OP_PARAM)).thenReturn(operation);
-    Mockito.when(request.getParameter(HttpFSParams.DataParam.NAME)).
+    Mockito.when(request.getParameter(HttpFSParametersProvider.DataParam.NAME)).
       thenReturn(Boolean.toString(upload));
     Mockito.when(request.getContentType()).thenReturn(contentType);
 

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1354603&r1=1354602&r2=1354603&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Jun 27 16:13:35 2012
@@ -84,6 +84,8 @@ Release 2.0.1-alpha - UNRELEASED
 
     HDFS-3535. Audit logging should log denied accesses. (Andy Isaacson via eli)
 
+    HDFS-3113. Refactor HttpFS handling of JAX-RS query string parameters (tucu)
+
   OPTIMIZATIONS
 
     HDFS-2982. Startup performance suffers when there are many edit log