You are viewing a plain text version of this content; the canonical link to the original HTML version was not preserved in this text rendering.
Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2007/12/18 16:02:25 UTC
svn commit: r605221 - in /lucene/hadoop/branches/branch-0.15: CHANGES.txt
src/java/org/apache/hadoop/fs/FsShell.java
src/test/org/apache/hadoop/dfs/TestDFSShell.java
Author: dhruba
Date: Tue Dec 18 07:02:23 2007
New Revision: 605221
URL: http://svn.apache.org/viewvc?rev=605221&view=rev
Log:
HADOOP-2422. dfs -cat multiple files fail with 'Unable to write to
output stream'. (Raghu Angadi via dhruba)
svn merge -c 605220 from trunk.
Modified:
lucene/hadoop/branches/branch-0.15/CHANGES.txt
lucene/hadoop/branches/branch-0.15/src/java/org/apache/hadoop/fs/FsShell.java
lucene/hadoop/branches/branch-0.15/src/test/org/apache/hadoop/dfs/TestDFSShell.java
Modified: lucene/hadoop/branches/branch-0.15/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.15/CHANGES.txt?rev=605221&r1=605220&r2=605221&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.15/CHANGES.txt (original)
+++ lucene/hadoop/branches/branch-0.15/CHANGES.txt Tue Dec 18 07:02:23 2007
@@ -69,6 +69,8 @@
by HADOOP-1917 weren't correctly copied over to the trunk/docs directory.
Also fixed a couple of minor typos and broken links. (acmurthy)
+ HADOOP-2422. dfs -cat multiple files fail with 'Unable to write to
+ output stream'. (Raghu Angadi via dhruba)
Release 0.15.0 - 2007-11-2
Modified: lucene/hadoop/branches/branch-0.15/src/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.15/src/java/org/apache/hadoop/fs/FsShell.java?rev=605221&r1=605220&r2=605221&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.15/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ lucene/hadoop/branches/branch-0.15/src/java/org/apache/hadoop/fs/FsShell.java Tue Dec 18 07:02:23 2007
@@ -97,7 +97,11 @@
throw new IOException("Source must be a file.");
}
FSDataInputStream in = fs.open(src);
- IOUtils.copyBytes(in, System.out, getConf(), true);
+ try {
+ IOUtils.copyBytes(in, System.out, getConf(), false);
+ } finally {
+ in.close();
+ }
}
/**
Modified: lucene/hadoop/branches/branch-0.15/src/test/org/apache/hadoop/dfs/TestDFSShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.15/src/test/org/apache/hadoop/dfs/TestDFSShell.java?rev=605221&r1=605220&r2=605221&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.15/src/test/org/apache/hadoop/dfs/TestDFSShell.java (original)
+++ lucene/hadoop/branches/branch-0.15/src/test/org/apache/hadoop/dfs/TestDFSShell.java Tue Dec 18 07:02:23 2007
@@ -283,12 +283,16 @@
Path myFile = new Path("/test/mkdirs/myFile");
writeFile(fileSys, myFile);
assertTrue(fileSys.exists(myFile));
+ Path myFile2 = new Path("/test/mkdirs/myFile2");
+ writeFile(fileSys, myFile2);
+ assertTrue(fileSys.exists(myFile2));
// Verify that we can read the file
{
- String[] args = new String[2];
+ String[] args = new String[3];
args[0] = "-cat";
args[1] = "/test/mkdirs/myFile";
+ args[2] = "/test/mkdirs/myFile2";
int val = -1;
try {
val = shell.run(args);
@@ -298,6 +302,7 @@
}
assertTrue(val == 0);
}
+ fileSys.delete(myFile2);
// Verify that we can get with and without crc
{