Posted to hdfs-commits@hadoop.apache.org by sh...@apache.org on 2013/01/30 07:52:36 UTC
svn commit: r1440290 - in
/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: ./
src/main/java/org/apache/hadoop/hdfs/
src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/
src/main/java/org/apache/hadoop/hdfs/web/ src/main/java/org/apa...
Author: shv
Date: Wed Jan 30 06:51:57 2013
New Revision: 1440290
URL: http://svn.apache.org/viewvc?rev=1440290&view=rev
Log:
HDFS-3598. WebHDFS support for file concat. Contributed by Plamen Jeliazkov.
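In REST terms, the new operation is an HTTP POST against the target file, with the source paths passed, comma-separated, in the new "srcs" query parameter. A minimal sketch of the request, with host, port, and paths invented for illustration:

    POST http://namenode.example.com:50070/webhdfs/v1/user/foo/target?op=CONCAT&srcs=/user/foo/part1,/user/foo/part2

On success the NameNode answers 200 OK with an empty body; no file data flows through the client.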
Added:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java (with props)
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Jan 30 06:51:57 2013
@@ -499,6 +499,8 @@ Release 2.0.3-alpha - Unreleased
HDFS-4259. Improve pipeline DN replacement failure message (harsh)
+ HDFS-3598. WebHDFS support for file concat. (Plamen Jeliazkov via shv)
+
OPTIMIZATIONS
HDFS-3429. DataNode reads checksums even if client does not need them (todd)
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java Wed Jan 30 06:51:57 2013
@@ -310,10 +310,9 @@ public class DistributedFileSystem exten
}
/**
- * THIS IS DFS only operations, it is not part of FileSystem
- * move blocks from srcs to trg
+ * Move blocks from srcs to trg
* and delete srcs afterwards
- * all blocks should be the same size
+ * RESTRICTION: all blocks should be the same size
* @param trg existing file to append to
* @param psrcs list of files (same block size, same replication)
* @throws IOException
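The reworded Javadoc above states the contract the REST layer now mirrors: the target must already exist, and all files involved must share block size and replication. A minimal driver sketching the call, with configuration and paths invented for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.DistributedFileSystem;

    public class ConcatExample {
      public static void main(String[] args) throws Exception {
        // Assumes fs.defaultFS points at a running HDFS cluster.
        Configuration conf = new Configuration();
        DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
        Path target = new Path("/data/target");          // existing file to append to
        Path[] sources = { new Path("/data/part-0"),     // same block size and
                           new Path("/data/part-1") };   // replication as target
        dfs.concat(target, sources);
        // On success the sources are deleted; their blocks now belong to target.
      }
    }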
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Wed Jan 30 06:51:57 2013
@@ -70,6 +70,7 @@ import org.apache.hadoop.hdfs.web.WebHdf
import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam;
import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
@@ -483,10 +484,12 @@ public class NamenodeWebHdfsMethods {
final DoAsParam doAsUser,
@QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
final PostOpParam op,
+ @QueryParam(ConcatSourcesParam.NAME) @DefaultValue(ConcatSourcesParam.DEFAULT)
+ final ConcatSourcesParam concatSrcs,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
) throws IOException, InterruptedException {
- return post(ugi, delegation, username, doAsUser, ROOT, op, bufferSize);
+ return post(ugi, delegation, username, doAsUser, ROOT, op, concatSrcs, bufferSize);
}
/** Handle HTTP POST request. */
@@ -505,11 +508,13 @@ public class NamenodeWebHdfsMethods {
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PostOpParam.NAME) @DefaultValue(PostOpParam.DEFAULT)
final PostOpParam op,
+ @QueryParam(ConcatSourcesParam.NAME) @DefaultValue(ConcatSourcesParam.DEFAULT)
+ final ConcatSourcesParam concatSrcs,
@QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
final BufferSizeParam bufferSize
) throws IOException, InterruptedException {
- init(ugi, delegation, username, doAsUser, path, op, bufferSize);
+ init(ugi, delegation, username, doAsUser, path, op, concatSrcs, bufferSize);
return ugi.doAs(new PrivilegedExceptionAction<Response>() {
@Override
@@ -517,7 +522,7 @@ public class NamenodeWebHdfsMethods {
REMOTE_ADDRESS.set(request.getRemoteAddr());
try {
return post(ugi, delegation, username, doAsUser,
- path.getAbsolutePath(), op, bufferSize);
+ path.getAbsolutePath(), op, concatSrcs, bufferSize);
} finally {
REMOTE_ADDRESS.set(null);
}
@@ -532,6 +537,7 @@ public class NamenodeWebHdfsMethods {
final DoAsParam doAsUser,
final String fullpath,
final PostOpParam op,
+ final ConcatSourcesParam concatSrcs,
final BufferSizeParam bufferSize
) throws IOException, URISyntaxException {
final NameNode namenode = (NameNode)context.getAttribute("name.node");
@@ -543,6 +549,11 @@ public class NamenodeWebHdfsMethods {
fullpath, op.getValue(), -1L, -1L, bufferSize);
return Response.temporaryRedirect(uri).type(MediaType.APPLICATION_OCTET_STREAM).build();
}
+ case CONCAT:
+ {
+ namenode.getRpcServer().concat(fullpath, concatSrcs.getAbsolutePaths());
+ return Response.ok().build();
+ }
default:
throw new UnsupportedOperationException(op + " is not supported");
}
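The new CONCAT case is worth contrasting with APPEND just above it: APPEND replies with a temporary redirect so the client can stream data to a DataNode, while CONCAT is handled entirely by the NameNode and returns an empty 200. Roughly, the two exchanges look like this (hosts and ports invented for illustration):

    POST http://nn.example.com:50070/webhdfs/v1/f?op=APPEND
    => 307 TEMPORARY_REDIRECT, Location: http://dn.example.com:50075/webhdfs/v1/f?op=APPEND...

    POST http://nn.example.com:50070/webhdfs/v1/f?op=CONCAT&srcs=/a,/b
    => 200 OK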
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Wed Jan 30 06:51:57 2013
@@ -29,7 +29,9 @@ import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
@@ -65,6 +67,7 @@ import org.apache.hadoop.hdfs.server.nam
import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam;
import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
import org.apache.hadoop.hdfs.web.resources.DestinationParam;
@@ -103,6 +106,7 @@ import org.apache.hadoop.security.token.
import org.apache.hadoop.security.token.TokenRenewer;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.StringUtils;
import org.mortbay.util.ajax.JSON;
import com.google.common.base.Charsets;
@@ -717,6 +721,22 @@ public class WebHdfsFileSystem extends F
}
@Override
+ public void concat(final Path trg, final Path [] psrcs) throws IOException {
+ statistics.incrementWriteOps(1);
+ final HttpOpParam.Op op = PostOpParam.Op.CONCAT;
+
+ List<String> strPaths = new ArrayList<String>(psrcs.length);
+ for(Path psrc : psrcs) {
+ strPaths.add(psrc.toUri().getPath());
+ }
+
+ String srcs = StringUtils.join(",", strPaths);
+
+ ConcatSourcesParam param = new ConcatSourcesParam(srcs);
+ run(op, trg, param);
+ }
+
+ @Override
public FSDataOutputStream create(final Path f, final FsPermission permission,
final boolean overwrite, final int bufferSize, final short replication,
final long blockSize, final Progressable progress) throws IOException {
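With concat overridden here, the same call works transparently against a webhdfs:// URI. A short sketch, with the authority invented and assuming the FileSystem base class exposes concat at this revision (the @Override above suggests it does):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Same operation as the DistributedFileSystem example, now over REST.
    FileSystem fs = FileSystem.get(
        URI.create("webhdfs://nn.example.com:50070"), new Configuration());
    fs.concat(new Path("/data/target"),
        new Path[] { new Path("/data/part-0"), new Path("/data/part-1") });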
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java?rev=1440290&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java Wed Jan 30 06:51:57 2013
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.web.resources;
+
+/** The concat source paths parameter. */
+public class ConcatSourcesParam extends StringParam {
+ /** Parameter name. */
+ public static final String NAME = "srcs";
+
+ public static final String DEFAULT = NULL;
+
+ private static final Domain DOMAIN = new Domain(NAME, null);
+
+ /**
+ * Constructor.
+ * @param str a string representation of the parameter value.
+ */
+ public ConcatSourcesParam(String str) {
+ super(DOMAIN, str);
+ }
+
+ @Override
+ public String getName() {
+ return NAME;
+ }
+
+ /** @return the absolute path. */
+ public final String[] getAbsolutePaths() {
+ final String[] paths = getValue().split(",");
+ return paths;
+ }
+}
Propchange: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java
------------------------------------------------------------------------------
svn:mime-type = text/plain
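The round trip through the new parameter is a plain comma split, which a short sketch makes concrete (paths invented for illustration):

    ConcatSourcesParam param = new ConcatSourcesParam("/user/foo/a,/user/foo/b");
    String[] srcs = param.getAbsolutePaths();  // {"/user/foo/a", "/user/foo/b"}

One consequence of the unescaped split(",") is that a source path containing a literal comma cannot be expressed through this parameter.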
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java Wed Jan 30 06:51:57 2013
@@ -23,13 +23,17 @@ import java.net.HttpURLConnection;
public class PostOpParam extends HttpOpParam<PostOpParam.Op> {
/** Post operations. */
public static enum Op implements HttpOpParam.Op {
- APPEND(HttpURLConnection.HTTP_OK),
- NULL(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
+ APPEND(true, HttpURLConnection.HTTP_OK),
+ CONCAT(false, HttpURLConnection.HTTP_OK),
+ NULL(false, HttpURLConnection.HTTP_NOT_IMPLEMENTED);
+
+ final boolean doOutputAndRedirect;
final int expectedHttpResponseCode;
- Op(final int expectedHttpResponseCode) {
+ Op(final boolean doOutputAndRedirect, final int expectedHttpResponseCode) {
+ this.doOutputAndRedirect = doOutputAndRedirect;
this.expectedHttpResponseCode = expectedHttpResponseCode;
}
@@ -40,12 +44,12 @@ public class PostOpParam extends HttpOpP
@Override
public boolean getDoOutput() {
- return true;
+ return doOutputAndRedirect;
}
@Override
public boolean getRedirect() {
- return true;
+ return doOutputAndRedirect;
}
@Override
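The net effect of the new doOutputAndRedirect flag, per operation:

    Op      getDoOutput  getRedirect
    APPEND  true         true
    CONCAT  false        false
    NULL    false        false

getDoOutput() == false means the client sends no request body, and getRedirect() == false means it expects the final response from the NameNode rather than a 307 pointing at a DataNode.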
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java?rev=1440290&r1=1440289&r2=1440290&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestFSMainOperationsWebHdfs.java Wed Jan 30 06:51:57 2013
@@ -27,11 +27,13 @@ import java.security.PrivilegedException
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSMainOperationsBaseTest;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.datanode.web.resources.DatanodeWebHdfsMethods;
import org.apache.hadoop.hdfs.web.resources.ExceptionHandler;
@@ -60,6 +62,7 @@ public class TestFSMainOperationsWebHdfs
final Configuration conf = new Configuration();
conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
+ conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024);
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
cluster.waitActive();
@@ -101,6 +104,30 @@ public class TestFSMainOperationsWebHdfs
return defaultWorkingDirectory;
}
+ @Test
+ public void testConcat() throws Exception {
+ Path[] paths = {new Path("/test/hadoop/file1"),
+ new Path("/test/hadoop/file2"),
+ new Path("/test/hadoop/file3")};
+
+ DFSTestUtil.createFile(fSys, paths[0], 1024, (short) 3, 0);
+ DFSTestUtil.createFile(fSys, paths[1], 1024, (short) 3, 0);
+ DFSTestUtil.createFile(fSys, paths[2], 1024, (short) 3, 0);
+
+ Path catPath = new Path("/test/hadoop/catFile");
+ DFSTestUtil.createFile(fSys, catPath, 1024, (short) 3, 0);
+ Assert.assertTrue(exists(fSys, catPath));
+
+ fSys.concat(catPath, paths);
+
+ Assert.assertFalse(exists(fSys, paths[0]));
+ Assert.assertFalse(exists(fSys, paths[1]));
+ Assert.assertFalse(exists(fSys, paths[2]));
+
+ FileStatus fileStatus = fSys.getFileStatus(catPath);
+ Assert.assertEquals(1024*4, fileStatus.getLen());
+ }
+
@Override
@Test
public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
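A note on the test setup: lowering DFSConfigKeys.DFS_BLOCK_SIZE_KEY to 1024 makes each 1024-byte file created by DFSTestUtil.createFile exactly one full block, satisfying concat's same-block-size restriction. The assertions then check that the three sources are gone and that the target, which started at 1024 bytes, has grown to 4 * 1024.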