You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2011/09/11 03:43:44 UTC
svn commit: r1167663 [1/2] - in
/hadoop/common/branches/branch-0.20-security: ./ src/core/
src/hdfs/org/apache/hadoop/hdfs/
src/hdfs/org/apache/hadoop/hdfs/server/datanode/
src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/
src/hdfs/org/apache/hadoop...
Author: szetszwo
Date: Sun Sep 11 01:43:43 2011
New Revision: 1167663
URL: http://svn.apache.org/viewvc?rev=1167663&view=rev
Log:
HDFS-2284. Add a new FileSystem, webhdfs://, for supporting write Http access to HDFS.
Added:
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BooleanParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/EnumParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GroupParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/IntegerParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/LongParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/OverwriteParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/OwnerParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/Param.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/PostOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/RecursiveParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ReplicationParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ShortParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/StringParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UriFsPathParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserParam.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/UserProvider.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestJsonUtil.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
Modified:
hadoop/common/branches/branch-0.20-security/CHANGES.txt
hadoop/common/branches/branch-0.20-security/src/core/core-default.xml
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/fs/FileSystemContractBaseTest.java
hadoop/common/branches/branch-0.20-security/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java
Modified: hadoop/common/branches/branch-0.20-security/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/CHANGES.txt?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security/CHANGES.txt Sun Sep 11 01:43:43 2011
@@ -28,8 +28,11 @@ Release 0.20.205.0 - unreleased
MAPREDUCE-2764. Allow JobTracker to renew and cancel arbitrary token types,
including delegation tokens obtained via hftp. (omalley)
- HADOOP-7119 add Kerberos HTTP SPNEGO authentication support to
- Hadoop JT/NN/DN/TT web-consoles backport from Trunk (sanjay)
+ HADOOP-7119 add Kerberos HTTP SPNEGO authentication support to
+ Hadoop JT/NN/DN/TT web-consoles backport from Trunk (sanjay)
+
+ HDFS-2284. Add a new FileSystem, webhdfs://, for supporting write Http
+ access to HDFS. (szetszwo)
BUG FIXES
Modified: hadoop/common/branches/branch-0.20-security/src/core/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/core/core-default.xml?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/core/core-default.xml (original)
+++ hadoop/common/branches/branch-0.20-security/src/core/core-default.xml Sun Sep 11 01:43:43 2011
@@ -175,6 +175,11 @@
</property>
<property>
+ <name>fs.webhdfs.impl</name>
+ <value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
+</property>
+
+<property>
<name>fs.ftp.impl</name>
<value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>
<description>The FileSystem for ftp: uris.</description>
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DFSClient.java Sun Sep 11 01:43:43 2011
@@ -646,7 +646,7 @@ public class DFSClient implements FSCons
* @throws IOException
* @see ClientProtocol#append(String, String)
*/
- OutputStream append(String src, int buffersize, Progressable progress
+ public DFSOutputStream append(String src, int buffersize, Progressable progress
) throws IOException {
checkOpen();
HdfsFileStatus stat = null;
@@ -660,7 +660,7 @@ public class DFSClient implements FSCons
NSQuotaExceededException.class,
DSQuotaExceededException.class);
}
- OutputStream result = new DFSOutputStream(src, buffersize, progress,
+ final DFSOutputStream result = new DFSOutputStream(src, buffersize, progress,
lastBlock, stat, conf.getInt("io.bytes.per.checksum", 512));
leasechecker.put(src, result);
return result;
@@ -2415,7 +2415,7 @@ public class DFSClient implements FSCons
* datanode from the original pipeline. The DataStreamer now
* starts sending packets from the dataQueue.
****************************************************************/
- class DFSOutputStream extends FSOutputSummer implements Syncable {
+ public class DFSOutputStream extends FSOutputSummer implements Syncable {
private Socket s;
boolean closed = false;
@@ -3754,7 +3754,7 @@ public class DFSClient implements FSCons
/**
* Returns the size of a file as it was when this stream was opened
*/
- long getInitialLen() {
+ public long getInitialLen() {
return initialFileSize;
}
}
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/DistributedFileSystem.java Sun Sep 11 01:43:43 2011
@@ -204,7 +204,7 @@ public class DistributedFileSystem exten
Progressable progress) throws IOException {
statistics.incrementWriteOps(1);
- DFSOutputStream op = (DFSOutputStream)dfs.append(getPathName(f), bufferSize, progress);
+ final DFSOutputStream op = dfs.append(getPathName(f), bufferSize, progress);
return new FSDataOutputStream(op, statistics, op.getInitialLen());
}
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Sun Sep 11 01:43:43 2011
@@ -301,7 +301,8 @@ public class HftpFileSystem extends File
@Override
public FSDataInputStream open(Path f, int buffersize) throws IOException {
final HttpURLConnection connection = openConnection(
- "/data" + f.toUri().getPath(), "ugi=" + getUgiParameter());
+ "/data" + f.makeQualified(this).toUri().getPath(),
+ "ugi=" + getUgiParameter());
final InputStream in;
try {
connection.setRequestMethod("GET");
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/DataNode.java Sun Sep 11 01:43:43 2011
@@ -45,6 +45,11 @@ import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MalformedObjectNameException;
+import javax.management.ObjectName;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -69,17 +74,19 @@ import org.apache.hadoop.hdfs.security.t
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.server.common.GenerationStamp;
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
+import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
import org.apache.hadoop.hdfs.server.common.Storage;
-import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
import org.apache.hadoop.hdfs.server.datanode.FSDataset.VolumeInfo;
import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeInstrumentation;
+import org.apache.hadoop.hdfs.server.datanode.web.resources.DatanodeWebHdfsMethods;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.FileChecksumServlets;
import org.apache.hadoop.hdfs.server.namenode.JspHelper;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.StreamFile;
+import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand;
import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
import org.apache.hadoop.hdfs.server.protocol.BlockMetaDataInfo;
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryInfo;
@@ -91,7 +98,8 @@ import org.apache.hadoop.hdfs.server.pro
import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
-import org.apache.hadoop.hdfs.server.protocol.BalancerBandwidthCommand;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.resources.Param;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
@@ -109,17 +117,13 @@ import org.apache.hadoop.security.token.
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.DiskChecker;
+import org.apache.hadoop.util.DiskChecker.DiskErrorException;
+import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.util.DiskChecker.DiskErrorException;
-import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
import org.mortbay.util.ajax.JSON;
-import javax.management.InstanceAlreadyExistsException;
-import javax.management.MBeanRegistrationException;
-import javax.management.ObjectName;
-
/**********************************************************
* DataNode is a class (and program) that stores a set of
* blocks for a DFS deployment. A single deployment can
@@ -444,10 +448,17 @@ public class DataNode extends Configured
this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
FileChecksumServlets.GetServlet.class);
+
+ this.infoServer.setAttribute("datanode", this);
this.infoServer.setAttribute("datanode.blockScanner", blockScanner);
this.infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
this.infoServer.addServlet(null, "/blockScannerReport",
DataBlockScanner.Servlet.class);
+
+ infoServer.addJerseyResourcePackage(
+ DatanodeWebHdfsMethods.class.getPackage().getName()
+ + ";" + Param.class.getPackage().getName(),
+ "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
this.infoServer.start();
// adjust info port
this.dnRegistration.setInfoPort(this.infoServer.getPort());
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.datanode.web.resources;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.hdfs.DFSClient;
+import org.apache.hadoop.hdfs.DFSClient.DFSOutputStream;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
+import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
+import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.PermissionParam;
+import org.apache.hadoop.hdfs.web.resources.PostOpParam;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
+import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
+import org.apache.hadoop.io.IOUtils;
+
+/** Web-hdfs DataNode implementation. */
+@Path("")
+public class DatanodeWebHdfsMethods {
+ private static final Log LOG = LogFactory.getLog(DatanodeWebHdfsMethods.class);
+
+ private @Context ServletContext context;
+
+ /** Handle HTTP PUT request. */
+ @PUT
+ @Path("{" + UriFsPathParam.NAME + ":.*}")
+ @Consumes({"*/*"})
+ @Produces({MediaType.APPLICATION_JSON})
+ public Response put(
+ final InputStream in,
+ @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+ @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
+ final PutOpParam op,
+ @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
+ final PermissionParam permission,
+ @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
+ final OverwriteParam overwrite,
+ @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
+ final BufferSizeParam bufferSize,
+ @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
+ final ReplicationParam replication,
+ @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
+ final BlockSizeParam blockSize
+ ) throws IOException, URISyntaxException {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(op + ": " + path
+ + Param.toSortedString(", ", permission, overwrite, bufferSize,
+ replication, blockSize));
+ }
+
+ final String fullpath = path.getAbsolutePath();
+ final DataNode datanode = (DataNode)context.getAttribute("datanode");
+
+ switch(op.getValue()) {
+ case CREATE:
+ {
+ final Configuration conf = new Configuration(datanode.getConf());
+ final DFSClient dfsclient = new DFSClient(conf);
+ final FSDataOutputStream out = new FSDataOutputStream(dfsclient.create(
+ fullpath, permission.getFsPermission(), overwrite.getValue(),
+ replication.getValue(), blockSize.getValue(), null,
+ bufferSize.getValue()), null);
+ try {
+ IOUtils.copyBytes(in, out, bufferSize.getValue());
+ } finally {
+ out.close();
+ }
+ final String nnAddr = NameNode.getInfoServer(conf);
+ final URI uri = new URI(WebHdfsFileSystem.SCHEME + "://" + nnAddr + fullpath);
+ return Response.created(uri).type(MediaType.APPLICATION_JSON).build();
+ }
+ default:
+ throw new UnsupportedOperationException(op + " is not supported");
+ }
+ }
+
+ /** Handle HTTP POST request. */
+ @POST
+ @Path("{" + UriFsPathParam.NAME + ":.*}")
+ @Consumes({"*/*"})
+ @Produces({MediaType.APPLICATION_JSON})
+ public Response post(
+ final InputStream in,
+ @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+ @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
+ final PostOpParam op,
+ @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
+ final BufferSizeParam bufferSize
+ ) throws IOException, URISyntaxException {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace(op + ": " + path
+ + Param.toSortedString(", ", bufferSize));
+ }
+
+ final String fullpath = path.getAbsolutePath();
+ final DataNode datanode = (DataNode)context.getAttribute("datanode");
+
+ switch(op.getValue()) {
+ case APPEND:
+ {
+ final Configuration conf = new Configuration(datanode.getConf());
+ final DFSClient dfsclient = new DFSClient(conf);
+ final DFSOutputStream dfsout = dfsclient.append(fullpath,
+ bufferSize.getValue(), null);
+ final FSDataOutputStream out = new FSDataOutputStream(dfsout, null,
+ dfsout.getInitialLen());
+ try {
+ IOUtils.copyBytes(in, out, bufferSize.getValue());
+ } finally {
+ out.close();
+ }
+ return Response.ok().type(MediaType.APPLICATION_JSON).build();
+ }
+ default:
+ throw new UnsupportedOperationException(op + " is not supported");
+ }
+ }
+}
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Sun Sep 11 01:43:43 2011
@@ -84,7 +84,7 @@ public class FileDataServlet extends Dfs
// pick a random datanode
return jspHelper.randomNode();
}
- return jspHelper.bestNode(blks.get(0));
+ return JspHelper.bestNode(blks.get(0));
}
/**
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/JspHelper.java Sun Sep 11 01:43:43 2011
@@ -84,7 +84,7 @@ public class JspHelper {
return fsn.getRandomDatanode();
}
- public DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
+ public static DatanodeInfo bestNode(LocatedBlock blk) throws IOException {
TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
DatanodeInfo chosenNode = null;
int failures = 0;
Modified: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1167663&r1=1167662&r2=1167663&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNode.java Sun Sep 11 01:43:43 2011
@@ -17,22 +17,47 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
-import org.apache.commons.logging.*;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
-import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
+import org.apache.hadoop.hdfs.protocol.ClientProtocol;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.DirectoryListing;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.UnregisteredDatanodeException;
+import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem.CompleteFileStatus;
import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeInstrumentation;
+import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -40,39 +65,27 @@ import org.apache.hadoop.hdfs.server.pro
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.resources.Param;
import org.apache.hadoop.http.HttpServer;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.ipc.*;
+import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.net.NetworkTopology;
-import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.net.Node;
+import org.apache.hadoop.security.Groups;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
-import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
-import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.security.Groups;
-import org.apache.hadoop.security.RefreshUserMappingsProtocol;
-import org.apache.hadoop.security.SecurityUtil;
-
-import java.io.*;
-import java.net.*;
-import java.security.PrivilegedExceptionAction;
-import java.util.Collection;
-import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.StringUtils;
/**********************************************************
* NameNode serves as both directory namespace manager and
@@ -386,6 +399,11 @@ public class NameNode implements ClientP
FileChecksumServlets.RedirectServlet.class, false);
httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
ContentSummaryServlet.class, false);
+
+ httpServer.addJerseyResourcePackage(
+ NamenodeWebHdfsMethods.class.getPackage().getName()
+ + ";" + Param.class.getPackage().getName(),
+ "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
httpServer.start();
// The web-server port can be ephemeral... ensure we have the correct info
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,296 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode.web.resources;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import javax.servlet.ServletContext;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.server.namenode.JspHelper;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
+import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
+import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
+import org.apache.hadoop.hdfs.web.resources.DstPathParam;
+import org.apache.hadoop.hdfs.web.resources.GetOpParam;
+import org.apache.hadoop.hdfs.web.resources.GroupParam;
+import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;
+import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
+import org.apache.hadoop.hdfs.web.resources.OwnerParam;
+import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.PermissionParam;
+import org.apache.hadoop.hdfs.web.resources.PostOpParam;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
+import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
+import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
+
+/** Web-hdfs NameNode implementation. */
+@Path("")
+public class NamenodeWebHdfsMethods {
+  private static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
+
+  /** Injected servlet context; used to look up the NameNode instance. */
+  private @Context ServletContext context;
+
+  /**
+   * Choose the datanode to which the client is redirected.
+   * For APPEND, prefer a node holding the last block of the file so the
+   * append can be served locally; otherwise pick a random datanode.
+   *
+   * @param namenode the name node.
+   * @param path the file path, already absolute.
+   * @param op the requested HTTP operation.
+   * @return the chosen datanode.
+   * @throws FileNotFoundException if op is APPEND and the path does not exist.
+   * @throws IOException if block locations cannot be obtained.
+   */
+  private static DatanodeInfo chooseDatanode(final NameNode namenode,
+      final String path, final HttpOpParam.Op op) throws IOException {
+    if (op == PostOpParam.Op.APPEND) {
+      final HdfsFileStatus status = namenode.getFileInfo(path);
+      if (status == null) {
+        //getFileInfo returns null for a nonexistent path; fail explicitly
+        //instead of throwing a NullPointerException below.
+        throw new FileNotFoundException("File " + path + " not found.");
+      }
+      final long len = status.getLen();
+      if (len > 0) {
+        //locate the last block and redirect to one of its replicas
+        final LocatedBlocks locations = namenode.getBlockLocations(path, len-1, 1);
+        final int count = locations.locatedBlockCount();
+        if (count > 0) {
+          return JspHelper.bestNode(locations.get(count - 1));
+        }
+      }
+    }
+
+    return namenode.getNamesystem().getRandomDatanode();
+  }
+
+  /**
+   * Build the datanode URI to which the client request is redirected.
+   *
+   * @param namenode the name node.
+   * @param path the file path, already absolute.
+   * @param op the requested HTTP operation.
+   * @param parameters additional query parameters to forward.
+   * @return the redirect URI on the chosen datanode's info port.
+   */
+  private static URI redirectURI(final NameNode namenode,
+      final String path, final HttpOpParam.Op op,
+      final Param<?, ?>... parameters) throws URISyntaxException, IOException {
+    final DatanodeInfo dn = chooseDatanode(namenode, path, op);
+    final String query = op.toQueryString() + Param.toSortedString("&", parameters);
+    final String uripath = "/" + WebHdfsFileSystem.PATH_PREFIX + path;
+
+    final URI uri = new URI("http", null, dn.getHostName(), dn.getInfoPort(),
+        uripath, query, null);
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("redirectURI=" + uri);
+    }
+    return uri;
+  }
+
+  /** Handle HTTP PUT request: CREATE, MKDIRS, RENAME, SETREPLICATION,
+   *  SETOWNER, SETPERMISSION and SETTIMES. */
+  @PUT
+  @Path("{" + UriFsPathParam.NAME + ":.*}")
+  @Consumes({"*/*"})
+  @Produces({MediaType.APPLICATION_JSON})
+  public Response put(
+      final InputStream in,
+      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+      @QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
+          final PutOpParam op,
+      @QueryParam(DstPathParam.NAME) @DefaultValue(DstPathParam.DEFAULT)
+          final DstPathParam dstPath,
+      @QueryParam(OwnerParam.NAME) @DefaultValue(OwnerParam.DEFAULT)
+          final OwnerParam owner,
+      @QueryParam(GroupParam.NAME) @DefaultValue(GroupParam.DEFAULT)
+          final GroupParam group,
+      @QueryParam(PermissionParam.NAME) @DefaultValue(PermissionParam.DEFAULT)
+          final PermissionParam permission,
+      @QueryParam(OverwriteParam.NAME) @DefaultValue(OverwriteParam.DEFAULT)
+          final OverwriteParam overwrite,
+      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
+          final BufferSizeParam bufferSize,
+      @QueryParam(ReplicationParam.NAME) @DefaultValue(ReplicationParam.DEFAULT)
+          final ReplicationParam replication,
+      @QueryParam(BlockSizeParam.NAME) @DefaultValue(BlockSizeParam.DEFAULT)
+          final BlockSizeParam blockSize,
+      @QueryParam(ModificationTimeParam.NAME) @DefaultValue(ModificationTimeParam.DEFAULT)
+          final ModificationTimeParam modificationTime,
+      @QueryParam(AccessTimeParam.NAME) @DefaultValue(AccessTimeParam.DEFAULT)
+          final AccessTimeParam accessTime
+      ) throws IOException, URISyntaxException {
+
+    if (LOG.isTraceEnabled()) {
+      //log all accepted parameters, including the time parameters
+      LOG.trace(op + ": " + path
+          + Param.toSortedString(", ", dstPath, owner, group, permission,
+              overwrite, bufferSize, replication, blockSize,
+              modificationTime, accessTime));
+    }
+
+    final String fullpath = path.getAbsolutePath();
+    final NameNode namenode = (NameNode)context.getAttribute("name.node");
+
+    switch(op.getValue()) {
+    case CREATE:
+    {
+      //CREATE is executed on a datanode: redirect the client there
+      final URI uri = redirectURI(namenode, fullpath, op.getValue(),
+          permission, overwrite, bufferSize, replication, blockSize);
+      return Response.temporaryRedirect(uri).build();
+    }
+    case MKDIRS:
+    {
+      final boolean b = namenode.mkdirs(fullpath, permission.getFsPermission());
+      final String js = JsonUtil.toJsonString(PutOpParam.Op.MKDIRS, b);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
+    case RENAME:
+    {
+      final boolean b = namenode.rename(fullpath, dstPath.getValue());
+      final String js = JsonUtil.toJsonString(PutOpParam.Op.RENAME, b);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
+    case SETREPLICATION:
+    {
+      final boolean b = namenode.setReplication(fullpath, replication.getValue());
+      final String js = JsonUtil.toJsonString(PutOpParam.Op.SETREPLICATION, b);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
+    case SETOWNER:
+    {
+      namenode.setOwner(fullpath, owner.getValue(), group.getValue());
+      return Response.ok().type(MediaType.APPLICATION_JSON).build();
+    }
+    case SETPERMISSION:
+    {
+      namenode.setPermission(fullpath, permission.getFsPermission());
+      return Response.ok().type(MediaType.APPLICATION_JSON).build();
+    }
+    case SETTIMES:
+    {
+      namenode.setTimes(fullpath, modificationTime.getValue(), accessTime.getValue());
+      return Response.ok().type(MediaType.APPLICATION_JSON).build();
+    }
+    default:
+      throw new UnsupportedOperationException(op + " is not supported");
+    }
+  }
+
+  /** Handle HTTP POST request: APPEND. */
+  @POST
+  @Path("{" + UriFsPathParam.NAME + ":.*}")
+  @Consumes({"*/*"})
+  @Produces({MediaType.APPLICATION_JSON})
+  public Response post(
+      final InputStream in,
+      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+      @QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
+          final PostOpParam op,
+      @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
+          final BufferSizeParam bufferSize
+      ) throws IOException, URISyntaxException {
+
+    if (LOG.isTraceEnabled()) {
+      LOG.trace(op + ": " + path
+          + Param.toSortedString(", ", bufferSize));
+    }
+
+    final String fullpath = path.getAbsolutePath();
+    final NameNode namenode = (NameNode)context.getAttribute("name.node");
+
+    switch(op.getValue()) {
+    case APPEND:
+    {
+      //APPEND is executed on a datanode: redirect the client there
+      final URI uri = redirectURI(namenode, fullpath, op.getValue(), bufferSize);
+      return Response.temporaryRedirect(uri).build();
+    }
+    default:
+      throw new UnsupportedOperationException(op + " is not supported");
+    }
+  }
+
+  /** The path parameter representing the root directory. */
+  private static final UriFsPathParam ROOT = new UriFsPathParam("");
+
+  /** Handle HTTP GET request for the root. */
+  @GET
+  @Path("/")
+  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
+  public Response root(
+      @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
+          final GetOpParam op
+      ) throws IOException {
+    return get(ROOT, op);
+  }
+
+  /** Handle HTTP GET request: GETFILESTATUS. */
+  @GET
+  @Path("{" + UriFsPathParam.NAME + ":.*}")
+  @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.APPLICATION_JSON})
+  public Response get(
+      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+      @QueryParam(GetOpParam.NAME) @DefaultValue(GetOpParam.DEFAULT)
+          final GetOpParam op
+      ) throws IOException {
+
+    if (LOG.isTraceEnabled()) {
+      LOG.trace(op + ", " + path
+          + Param.toSortedString(", "));
+    }
+
+    switch(op.getValue()) {
+    case GETFILESTATUS:
+      final NameNode namenode = (NameNode)context.getAttribute("name.node");
+      final String fullpath = path.getAbsolutePath();
+      //getFileInfo may return null; JsonUtil encodes that as {"isNull":true}
+      final HdfsFileStatus status = namenode.getFileInfo(fullpath);
+      final String js = JsonUtil.toJsonString(status);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+
+    default:
+      throw new UnsupportedOperationException(op + " is not supported");
+    }
+  }
+
+  /** Handle HTTP DELETE request: DELETE. */
+  @DELETE
+  @Path("{" + UriFsPathParam.NAME + ":.*}")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response delete(
+      @PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
+      @QueryParam(DeleteOpParam.NAME) @DefaultValue(DeleteOpParam.DEFAULT)
+          final DeleteOpParam op,
+      @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
+          final RecursiveParam recursive
+      ) throws IOException {
+
+    if (LOG.isTraceEnabled()) {
+      LOG.trace(op + ", " + path
+          + Param.toSortedString(", ", recursive));
+    }
+
+    switch(op.getValue()) {
+    case DELETE:
+      final NameNode namenode = (NameNode)context.getAttribute("name.node");
+      final String fullpath = path.getAbsolutePath();
+      final boolean b = namenode.delete(fullpath, recursive.getValue());
+      final String js = JsonUtil.toJsonString(DeleteOpParam.Op.DELETE, b);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+
+    default:
+      throw new UnsupportedOperationException(op + " is not supported");
+    }
+  }
+}
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/JsonUtil.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.ipc.RemoteException;
+import org.mortbay.util.ajax.JSON;
+
+/** JSON Utilities */
+public class JsonUtil {
+  //NOTE: a fresh TreeMap is built per call instead of reusing a
+  //ThreadLocal map: a shared per-thread map makes these overloads
+  //non-reentrant (a nested toJsonString call would clear the caller's
+  //half-built map) and pins one map per thread for the JVM lifetime.
+  //TreeMap keeps keys sorted so output is deterministic.
+
+  /** Convert an exception object to a Json string. */
+  public static String toJsonString(final Exception e) {
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put("className", e.getClass().getName());
+    m.put("message", e.getMessage());
+    return JSON.toString(m);
+  }
+
+  /** Convert a Json map to a RemoteException. */
+  public static RemoteException toRemoteException(final Map<String, Object> m) {
+    final String className = (String)m.get("className");
+    final String message = (String)m.get("message");
+    return new RemoteException(className, message);
+  }
+
+  /** Convert a key-value pair to a Json string. */
+  public static String toJsonString(final Object key, final Object value) {
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put(key instanceof String ? (String) key : key.toString(), value);
+    return JSON.toString(m);
+  }
+
+  /** Convert a FsPermission object to an octal string, e.g. "755". */
+  public static String toString(final FsPermission permission) {
+    return String.format("%o", permission.toShort());
+  }
+
+  /** Convert an octal string to a FsPermission object. */
+  public static FsPermission toFsPermission(final String s) {
+    return new FsPermission(Short.parseShort(s, 8));
+  }
+
+  /**
+   * Convert a HdfsFileStatus object to a Json string.
+   * A null status is encoded as {"isNull":true} so that
+   * {@link #toFileStatus(Map)} can round-trip it back to null.
+   */
+  public static String toJsonString(final HdfsFileStatus status) {
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    if (status == null) {
+      m.put("isNull", true);
+    } else {
+      m.put("isNull", false);
+      m.put("localName", status.getLocalName());
+      m.put("isDir", status.isDir());
+      m.put("len", status.getLen());
+      m.put("owner", status.getOwner());
+      m.put("group", status.getGroup());
+      m.put("permission", toString(status.getPermission()));
+      m.put("accessTime", status.getAccessTime());
+      m.put("modificationTime", status.getModificationTime());
+      m.put("blockSize", status.getBlockSize());
+      m.put("replication", status.getReplication());
+    }
+    return JSON.toString(m);
+  }
+
+  /** Parse a Json string into a map. */
+  @SuppressWarnings("unchecked")
+  static Map<String, Object> parse(String jsonString) {
+    return (Map<String, Object>) JSON.parse(jsonString);
+  }
+
+  /**
+   * Convert a Json map to a HdfsFileStatus object.
+   * @return null if the map encodes a null status.
+   */
+  public static HdfsFileStatus toFileStatus(final Map<String, Object> m) {
+    if ((Boolean)m.get("isNull")) {
+      return null;
+    }
+
+    final String localName = (String) m.get("localName");
+    final boolean isDir = (Boolean) m.get("isDir");
+    final long len = (Long) m.get("len");
+    final String owner = (String) m.get("owner");
+    final String group = (String) m.get("group");
+    final FsPermission permission = toFsPermission((String) m.get("permission"));
+    final long aTime = (Long) m.get("accessTime");
+    final long mTime = (Long) m.get("modificationTime");
+    final long blockSize = (Long) m.get("blockSize");
+    //the JSON parser returns numbers as Long; narrow to short
+    final short replication = (short) (long) (Long) m.get("replication");
+    return new HdfsFileStatus(len, isDir, replication, blockSize, mTime, aTime,
+        permission, owner, group, DFSUtil.string2Bytes(localName));
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,304 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.web;
+
+import java.io.BufferedOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HftpFileSystem;
+import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
+import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
+import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
+import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
+import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
+import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
+import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
+import org.apache.hadoop.hdfs.web.resources.DstPathParam;
+import org.apache.hadoop.hdfs.web.resources.GetOpParam;
+import org.apache.hadoop.hdfs.web.resources.GroupParam;
+import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;
+import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
+import org.apache.hadoop.hdfs.web.resources.OwnerParam;
+import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.PermissionParam;
+import org.apache.hadoop.hdfs.web.resources.PostOpParam;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
+import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
+import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.util.Progressable;
+import org.mortbay.util.ajax.JSON;
+
+/** A FileSystem for HDFS over the web. */
+public class WebHdfsFileSystem extends HftpFileSystem {
+ /** File System URI: {SCHEME}://namenode:port/path/to/file */
+ public static final String SCHEME = "webhdfs";
+ /** Http URI: http://namenode:port/{PATH_PREFIX}/path/to/file */
+ public static final String PATH_PREFIX = SCHEME;
+
+ protected Path workingDir;
+
+ @Override
+ public URI getUri() {
+ try {
+ return new URI(SCHEME, null, nnAddr.getHostName(), nnAddr.getPort(),
+ null, null, null);
+ } catch (URISyntaxException e) {
+ return null;
+ }
+ }
+
+ @Override
+ public synchronized Path getWorkingDirectory() {
+ if (workingDir == null) {
+ workingDir = getHomeDirectory();
+ }
+ return workingDir;
+ }
+
+ @Override
+ public synchronized void setWorkingDirectory(final Path dir) {
+ String result = makeAbsolute(dir).toUri().getPath();
+ if (!DFSUtil.isValidName(result)) {
+ throw new IllegalArgumentException("Invalid DFS directory name " +
+ result);
+ }
+ workingDir = makeAbsolute(dir);
+ }
+
+ private Path makeAbsolute(Path f) {
+ return f.isAbsolute()? f: new Path(workingDir, f);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static Map<String, Object> jsonParse(final InputStream in
+ ) throws IOException {
+ if (in == null) {
+ throw new IOException("The input stream is null.");
+ }
+ return (Map<String, Object>)JSON.parse(new InputStreamReader(in));
+ }
+
+ private static void validateResponse(final HttpOpParam.Op op,
+ final HttpURLConnection conn) throws IOException {
+ final int code = conn.getResponseCode();
+ if (code != op.getExpectedHttpResponseCode()) {
+ final Map<String, Object> m;
+ try {
+ m = jsonParse(conn.getErrorStream());
+ } catch(IOException e) {
+ throw new IOException("Unexpected HTTP response: code = " + code + " != "
+ + op.getExpectedHttpResponseCode() + ", " + op.toQueryString()
+ + ", message=" + conn.getResponseMessage(), e);
+ }
+
+ final RemoteException re = JsonUtil.toRemoteException(m);
+ throw re.unwrapRemoteException(AccessControlException.class,
+ DSQuotaExceededException.class,
+ FileNotFoundException.class,
+ SafeModeException.class,
+ NSQuotaExceededException.class);
+ }
+ }
+
+ private HttpURLConnection httpConnect(final HttpOpParam.Op op, final Path fspath,
+ final Param<?,?>... parameters) throws IOException {
+ //initialize URI path and query
+ final String uripath = "/" + PATH_PREFIX + makeQualified(fspath).toUri().getPath();
+ final String query = op.toQueryString() + Param.toSortedString("&", parameters);
+
+ //connect and get response
+ final HttpURLConnection conn = openConnection(uripath, query);
+ try {
+ conn.setRequestMethod(op.getType().toString());
+ conn.setDoOutput(op.getDoOutput());
+ if (op.getDoOutput()) {
+ conn.setRequestProperty("Expect", "100-Continue");
+ conn.setInstanceFollowRedirects(true);
+ }
+ conn.connect();
+ return conn;
+ } catch(IOException e) {
+ conn.disconnect();
+ throw e;
+ }
+ }
+
+ private Map<String, Object> run(final HttpOpParam.Op op, final Path fspath,
+ final Param<?,?>... parameters) throws IOException {
+ final HttpURLConnection conn = httpConnect(op, fspath, parameters);
+ validateResponse(op, conn);
+ try {
+ return jsonParse(conn.getInputStream());
+ } finally {
+ conn.disconnect();
+ }
+ }
+
+ private FsPermission applyUMask(FsPermission permission) {
+ if (permission == null) {
+ permission = FsPermission.getDefault();
+ }
+ return permission.applyUMask(FsPermission.getUMask(getConf()));
+ }
+
+ private HdfsFileStatus getHdfsFileStatus(Path f) throws IOException {
+ final HttpOpParam.Op op = GetOpParam.Op.GETFILESTATUS;
+ final Map<String, Object> json = run(op, f);
+ final HdfsFileStatus status = JsonUtil.toFileStatus(json);
+ if (status == null) {
+ throw new FileNotFoundException("File does not exist: " + f);
+ }
+ return status;
+ }
+
+ @Override
+ public FileStatus getFileStatus(Path f) throws IOException {
+ statistics.incrementReadOps(1);
+ return makeQualified(getHdfsFileStatus(f), f);
+ }
+
+ private FileStatus makeQualified(HdfsFileStatus f, Path parent) {
+ return new FileStatus(f.getLen(), f.isDir(), f.getReplication(),
+ f.getBlockSize(), f.getModificationTime(),
+ f.getAccessTime(),
+ f.getPermission(), f.getOwner(), f.getGroup(),
+ f.getFullPath(parent).makeQualified(this)); // fully-qualify path
+ }
+
+ @Override
+ public boolean mkdirs(Path f, FsPermission permission) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.MKDIRS;
+ final Map<String, Object> json = run(op, f,
+ new PermissionParam(applyUMask(permission)));
+ return (Boolean)json.get(op.toString());
+ }
+
+ @Override
+ public boolean rename(final Path src, final Path dst) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.RENAME;
+ final Map<String, Object> json = run(op, src,
+ new DstPathParam(makeQualified(dst).toUri().getPath()));
+ return (Boolean)json.get(op.toString());
+ }
+
+ @Override
+ public void setOwner(final Path p, final String owner, final String group
+ ) throws IOException {
+ if (owner == null && group == null) {
+ throw new IOException("owner == null && group == null");
+ }
+
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.SETOWNER;
+ run(op, p, new OwnerParam(owner), new GroupParam(group));
+ }
+
+ @Override
+ public void setPermission(final Path p, final FsPermission permission
+ ) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.SETPERMISSION;
+ run(op, p, new PermissionParam(permission));
+ }
+
+ @Override
+ public boolean setReplication(final Path p, final short replication
+ ) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.SETREPLICATION;
+ final Map<String, Object> json = run(op, p,
+ new ReplicationParam(replication));
+ return (Boolean)json.get(op.toString());
+ }
+
+ @Override
+ public void setTimes(final Path p, final long mtime, final long atime
+ ) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PutOpParam.Op.SETTIMES;
+ run(op, p, new ModificationTimeParam(mtime), new AccessTimeParam(atime));
+ }
+
+ private FSDataOutputStream write(final HttpOpParam.Op op,
+ final HttpURLConnection conn, final int bufferSize) throws IOException {
+ return new FSDataOutputStream(new BufferedOutputStream(
+ conn.getOutputStream(), bufferSize), statistics) {
+ @Override
+ public void close() throws IOException {
+ try {
+ super.close();
+ } finally {
+ validateResponse(op, conn);
+ }
+ }
+ };
+ }
+
+ @Override
+ public FSDataOutputStream create(final Path f, final FsPermission permission,
+ final boolean overwrite, final int bufferSize, final short replication,
+ final long blockSize, final Progressable progress) throws IOException {
+ statistics.incrementWriteOps(1);
+
+ final HttpOpParam.Op op = PutOpParam.Op.CREATE;
+ final HttpURLConnection conn = httpConnect(op, f,
+ new PermissionParam(applyUMask(permission)),
+ new OverwriteParam(overwrite),
+ new BufferSizeParam(bufferSize),
+ new ReplicationParam(replication),
+ new BlockSizeParam(blockSize));
+ return write(op, conn, bufferSize);
+ }
+
+ @Override
+ public FSDataOutputStream append(final Path f, final int bufferSize,
+ final Progressable progress) throws IOException {
+ statistics.incrementWriteOps(1);
+
+ final HttpOpParam.Op op = PostOpParam.Op.APPEND;
+ final HttpURLConnection conn = httpConnect(op, f,
+ new BufferSizeParam(bufferSize));
+ return write(op, conn, bufferSize);
+ }
+
+ @Override
+ public boolean delete(Path f, boolean recursive) throws IOException {
+ final HttpOpParam.Op op = DeleteOpParam.Op.DELETE;
+ final Map<String, Object> json = run(op, f, new RecursiveParam(recursive));
+ return (Boolean)json.get(op.toString());
+ }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Access time parameter. */
+public class AccessTimeParam extends LongParam {
+  /** Parameter name. */
+  public static final String NAME = "accessTime";
+  /** Default parameter value. */
+  public static final String DEFAULT = "-1";
+
+  private static final Domain DOMAIN = new Domain(NAME);
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+
+  /**
+   * Create the parameter from a long value.
+   * @param value the parameter value.
+   */
+  public AccessTimeParam(final Long value) {
+    super(DOMAIN, value);
+  }
+
+  /**
+   * Create the parameter from its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public AccessTimeParam(final String str) {
+    super(DOMAIN, DOMAIN.parse(str));
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Block size parameter. */
+public class BlockSizeParam extends LongParam {
+  /** Parameter name. */
+  public static final String NAME = "blockSize";
+  /** Default parameter value. */
+  public static final String DEFAULT = NULL;
+
+  private static final Domain DOMAIN = new Domain(NAME);
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+
+  /**
+   * Create the parameter from a long value.
+   * @param value the parameter value.
+   */
+  public BlockSizeParam(final Long value) {
+    super(DOMAIN, value);
+  }
+
+  /**
+   * Create the parameter from its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public BlockSizeParam(final String str) {
+    super(DOMAIN, DOMAIN.parse(str));
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BooleanParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BooleanParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BooleanParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BooleanParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Boolean parameter. */
+abstract class BooleanParam extends Param<Boolean, BooleanParam.Domain> {
+  static final String TRUE = "true";
+  static final String FALSE = "false";
+
+  BooleanParam(final Domain domain, final Boolean value) {
+    super(domain, value);
+  }
+
+  /** The domain of the parameter. */
+  static final class Domain extends Param.Domain<Boolean> {
+    Domain(final String paramName) {
+      super(paramName);
+    }
+
+    @Override
+    public String getDomain() {
+      return "<" + NULL + " | boolean>";
+    }
+
+    @Override
+    Boolean parse(final String str) {
+      // Accept only case-insensitive "true"/"false"; anything else
+      // (including null) is rejected rather than defaulted.
+      if (TRUE.equalsIgnoreCase(str)) {
+        return true;
+      }
+      if (FALSE.equalsIgnoreCase(str)) {
+        return false;
+      }
+      throw new IllegalArgumentException("Failed to parse \"" + str
+          + "\" to Boolean.");
+    }
+  }
+}
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Buffer size parameter. */
+public class BufferSizeParam extends IntegerParam {
+  /** Parameter name. */
+  public static final String NAME = "bufferSize";
+  /** Default parameter value. */
+  public static final String DEFAULT = NULL;
+
+  private static final Domain DOMAIN = new Domain(NAME);
+
+  /**
+   * Constructs the parameter from an {@link Integer} value.
+   * @param value the parameter value, possibly null.
+   */
+  public BufferSizeParam(final Integer value) {
+    super(DOMAIN, value);
+  }
+
+  /**
+   * Constructs the parameter by parsing its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public BufferSizeParam(final String str) {
+    super(DOMAIN, DOMAIN.parse(str));
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import java.net.HttpURLConnection;
+
+/** Http DELETE operation parameter. */
+public class DeleteOpParam extends HttpOpParam<DeleteOpParam.Op> {
+  /** Parameter name. */
+  public static final String NAME = "deleteOp";
+
+  private static final Domain<Op> DOMAIN = new Domain<Op>(NAME, Op.class);
+
+  /** Delete operations. */
+  public static enum Op implements HttpOpParam.Op {
+    DELETE(HttpURLConnection.HTTP_OK),
+
+    NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
+
+    final int expectedHttpResponseCode;
+
+    Op(final int expectedHttpResponseCode) {
+      this.expectedHttpResponseCode = expectedHttpResponseCode;
+    }
+
+    @Override
+    public HttpOpParam.Type getType() {
+      // Every operation of this parameter maps to an HTTP DELETE request.
+      return HttpOpParam.Type.DELETE;
+    }
+
+    @Override
+    public boolean getDoOutput() {
+      // DELETE requests carry no request body.
+      return false;
+    }
+
+    @Override
+    public int getExpectedHttpResponseCode() {
+      return expectedHttpResponseCode;
+    }
+
+    @Override
+    public String toQueryString() {
+      return NAME + "=" + name();
+    }
+  }
+
+  /**
+   * Constructs the parameter by parsing its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public DeleteOpParam(final String str) {
+    super(DOMAIN, DOMAIN.parse(str));
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DstPathParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DstPathParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DstPathParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/DstPathParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import org.apache.hadoop.fs.Path;
+
+/** Destination path parameter. */
+public class DstPathParam extends StringParam {
+  /** Parameter name. */
+  public static final String NAME = "dstPath";
+  /** Default parameter value. */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructs the parameter by parsing its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public DstPathParam(final String str) {
+    super(DOMAIN, toAbsolutePath(str));
+  }
+
+  /** Converts the raw value to an absolute path; null/default means absent. */
+  private static String toAbsolutePath(final String str) {
+    if (str == null || str.equals(DEFAULT)) {
+      return null;
+    }
+    return new Path(str).toUri().getPath();
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/EnumParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/EnumParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/EnumParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/EnumParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import java.util.Arrays;
+import java.util.Locale;
+
+/** Enum parameter. */
+abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
+  EnumParam(final Domain<E> domain, final E value) {
+    super(domain, value);
+  }
+
+  /** The domain of the parameter. */
+  static final class Domain<E extends Enum<E>> extends Param.Domain<E> {
+    private final Class<E> enumClass;
+
+    Domain(String name, final Class<E> enumClass) {
+      super(name);
+      this.enumClass = enumClass;
+    }
+
+    @Override
+    public final String getDomain() {
+      return Arrays.asList(enumClass.getEnumConstants()).toString();
+    }
+
+    @Override
+    final E parse(final String str) {
+      // Use Locale.ROOT so the case mapping is locale-independent.
+      // With the default locale, e.g. Turkish, "i".toUpperCase() yields
+      // a dotted capital I and Enum.valueOf would then fail.
+      return Enum.valueOf(enumClass, str.toUpperCase(Locale.ROOT));
+    }
+  }
+}
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/ExceptionHandler.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.web.JsonUtil;
+
+/** Handle exceptions. */
+@Provider
+public class ExceptionHandler implements ExceptionMapper<Exception> {
+ public static final Log LOG = LogFactory.getLog(ExceptionHandler.class);
+
+ @Override
+ public Response toResponse(final Exception e) {
+ if (LOG.isTraceEnabled()) {
+ LOG.trace("GOT EXCEPITION", e);
+ }
+
+ final Response.Status s;
+ if (e instanceof SecurityException) {
+ s = Response.Status.UNAUTHORIZED;
+ } else if (e instanceof FileNotFoundException) {
+ s = Response.Status.NOT_FOUND;
+ } else if (e instanceof IOException) {
+ s = Response.Status.FORBIDDEN;
+ } else if (e instanceof UnsupportedOperationException) {
+ s = Response.Status.BAD_REQUEST;
+ } else {
+ s = Response.Status.INTERNAL_SERVER_ERROR;
+ }
+
+ final String js = JsonUtil.toJsonString(e);
+ return Response.status(s).type(MediaType.APPLICATION_JSON).entity(js).build();
+ }
+}
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GetOpParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+import java.net.HttpURLConnection;
+
+/** Http GET operation parameter. */
+public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
+  /** Parameter name. */
+  public static final String NAME = "getOp";
+
+  private static final Domain<Op> DOMAIN = new Domain<Op>(NAME, Op.class);
+
+  /** Get operations. */
+  public static enum Op implements HttpOpParam.Op {
+    GETFILESTATUS(HttpURLConnection.HTTP_OK),
+    NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
+
+    final int expectedHttpResponseCode;
+
+    Op(final int expectedHttpResponseCode) {
+      this.expectedHttpResponseCode = expectedHttpResponseCode;
+    }
+
+    @Override
+    public HttpOpParam.Type getType() {
+      // Every operation of this parameter maps to an HTTP GET request.
+      return HttpOpParam.Type.GET;
+    }
+
+    @Override
+    public boolean getDoOutput() {
+      // GET requests carry no request body.
+      return false;
+    }
+
+    @Override
+    public int getExpectedHttpResponseCode() {
+      return expectedHttpResponseCode;
+    }
+
+    @Override
+    public String toQueryString() {
+      return NAME + "=" + name();
+    }
+  }
+
+  /**
+   * Constructs the parameter by parsing its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public GetOpParam(final String str) {
+    super(DOMAIN, DOMAIN.parse(str));
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GroupParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GroupParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GroupParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/GroupParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,41 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Group parameter. */
+public class GroupParam extends StringParam {
+  /** Parameter name. */
+  public static final String NAME = "group";
+  /** Default parameter value. */
+  public static final String DEFAULT = "";
+
+  private static final Domain DOMAIN = new Domain(NAME, null);
+
+  /**
+   * Constructs the parameter by parsing its string form.
+   * @param str a string representation of the parameter value.
+   */
+  public GroupParam(final String str) {
+    super(DOMAIN, defaultToNull(str));
+  }
+
+  /** Treats null and the empty default value as an absent parameter. */
+  private static String defaultToNull(final String str) {
+    return str == null || str.equals(DEFAULT) ? null : str;
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file
Added: hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java?rev=1167663&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java (added)
+++ hadoop/common/branches/branch-0.20-security/src/hdfs/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java Sun Sep 11 01:43:43 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Http operation parameter. */
+public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op> extends EnumParam<E> {
+  /** Default parameter value. */
+  public static final String DEFAULT = NULL;
+
+  /** Http operation types */
+  public static enum Type {
+    GET, PUT, POST, DELETE;
+  }
+
+  /** Http operation interface. */
+  public static interface Op {
+    /** @return the Http operation type. */
+    public Type getType();
+
+    /** @return true if the operation has output (i.e. sends a request body). */
+    public boolean getDoOutput();
+
+    /** @return the expected HTTP response code of the operation. */
+    public int getExpectedHttpResponseCode();
+
+    /** @return a URI query string. */
+    public String toQueryString();
+  }
+
+  /**
+   * Constructor.
+   * @param domain the parameter domain.
+   * @param value the parameter value.
+   */
+  HttpOpParam(final Domain<E> domain, final E value) {
+    super(domain, value);
+  }
+}
\ No newline at end of file