You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2013/08/06 00:11:31 UTC
svn commit: r1510780 - in
/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src:
main/java/org/apache/hadoop/fs/shell/CopyCommands.java
site/apt/FileSystemShell.apt.vm
Author: cnauroth
Date: Mon Aug 5 22:11:30 2013
New Revision: 1510780
URL: http://svn.apache.org/r1510780
Log:
HDFS-4905. Merging change r1510773 from trunk to branch-2.
Modified:
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1510780&r1=1510779&r2=1510780&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Mon Aug 5 22:11:30 2013
@@ -18,18 +18,16 @@
package org.apache.hadoop.fs.shell;
-import java.io.File;
-import java.io.IOException;
+import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.PathIsDirectoryException;
+import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
/** Various commands for copy files */
@@ -44,6 +42,7 @@ class CopyCommands {
factory.addClass(CopyToLocal.class, "-copyToLocal");
factory.addClass(Get.class, "-get");
factory.addClass(Put.class, "-put");
+ factory.addClass(AppendToFile.class, "-appendToFile");
}
/** merge multiple files together */
@@ -235,4 +234,93 @@ class CopyCommands {
public static final String USAGE = Get.USAGE;
public static final String DESCRIPTION = "Identical to the -get command.";
}
+
+ /**
+ * Append the contents of one or more local files to a remote
+ * file.
+ */
+ public static class AppendToFile extends CommandWithDestination {
+ public static final String NAME = "appendToFile";
+ public static final String USAGE = "<localsrc> ... <dst>";
+ public static final String DESCRIPTION =
+ "Appends the contents of all the given local files to the\n" +
+ "given dst file. The dst file will be created if it does\n" +
+ "not exist. If <localSrc> is -, then the input is read\n" +
+ "from stdin.";
+
+ private static final int DEFAULT_IO_LENGTH = 1024 * 1024;
+ boolean readStdin = false;
+
+ // commands operating on local paths have no need for glob expansion
+ @Override
+ protected List<PathData> expandArgument(String arg) throws IOException {
+ List<PathData> items = new LinkedList<PathData>();
+ if (arg.equals("-")) {
+ readStdin = true;
+ } else {
+ try {
+ items.add(new PathData(new URI(arg), getConf()));
+ } catch (URISyntaxException e) {
+ if (Path.WINDOWS) {
+ // Unlike URI, PathData knows how to parse Windows drive-letter paths.
+ items.add(new PathData(arg, getConf()));
+ } else {
+ throw new IOException("Unexpected URISyntaxException: " + e.toString());
+ }
+ }
+ }
+ return items;
+ }
+
+ @Override
+ protected void processOptions(LinkedList<String> args)
+ throws IOException {
+
+ if (args.size() < 2) {
+ throw new IOException("missing destination argument");
+ }
+
+ getRemoteDestination(args);
+ super.processOptions(args);
+ }
+
+ @Override
+ protected void processArguments(LinkedList<PathData> args)
+ throws IOException {
+
+ if (!dst.exists) {
+ dst.fs.create(dst.path, false).close();
+ }
+
+ InputStream is = null;
+ FSDataOutputStream fos = dst.fs.append(dst.path);
+
+ try {
+ if (readStdin) {
+ if (args.size() == 0) {
+ IOUtils.copyBytes(System.in, fos, DEFAULT_IO_LENGTH);
+ } else {
+ throw new IOException(
+ "stdin (-) must be the sole input argument when present");
+ }
+ }
+
+ // Read in each input file and write to the target.
+ for (PathData source : args) {
+ is = new FileInputStream(source.toFile());
+ IOUtils.copyBytes(is, fos, DEFAULT_IO_LENGTH);
+ IOUtils.closeStream(is);
+ is = null;
+ }
+ } finally {
+ if (is != null) {
+ IOUtils.closeStream(is);
+ }
+
+ if (fos != null) {
+ IOUtils.closeStream(fos);
+ }
+ }
+ }
+ }
}
Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm?rev=1510780&r1=1510779&r2=1510780&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm Mon Aug 5 22:11:30 2013
@@ -45,6 +45,27 @@ bin/hadoop fs <args>
Differences are described with each of the commands. Error information is
sent to stderr and the output is sent to stdout.
+appendToFile
+
+ Usage: <<<hdfs dfs -appendToFile <localsrc> ... <dst> >>>
+
+ Appends a single src, or multiple srcs, from the local file system to
+ the destination file system. Also reads input from stdin and appends
+ it to the destination file system.
+
+ * <<<hdfs dfs -appendToFile localfile /user/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile localfile1 localfile2 /user/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile localfile hdfs://nn.example.com/hadoop/hadoopfile>>>
+
+ * <<<hdfs dfs -appendToFile - hdfs://nn.example.com/hadoop/hadoopfile>>>
+ Reads the input from stdin.
+
+ Exit Code:
+
+ Returns 0 on success and 1 on error.
+
cat
Usage: <<<hdfs dfs -cat URI [URI ...]>>>
@@ -76,7 +97,7 @@ chmod
Change the permissions of files. With -R, make the change recursively
through the directory structure. The user must be the owner of the file, or
- else a super-user. Additional information is in the
+ else a super-user. Additional information is in the
{{{betterurl}Permissions Guide}}.
chown