You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/06/19 07:39:42 UTC
svn commit: r1137291 - in /hadoop/common/branches/branch-0.20-security-204:
./ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/util/
src/hdfs/org/apache/hadoop/hdfs/server/datanode/
src/test/org/apache/hadoop/fs/
Author: omalley
Date: Sun Jun 19 05:39:41 2011
New Revision: 1137291
URL: http://svn.apache.org/viewvc?rev=1137291&view=rev
Log:
HDFS-2023. Backport of NPE for File.list and File.listFiles.
Merged ports of HADOOP-7322, HDFS-1934, HADOOP-7342, and HDFS-2019.
(Bharath Mundlapudi via mattf)
Modified:
hadoop/common/branches/branch-0.20-security-204/CHANGES.txt (contents, props changed)
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/FileUtil.java
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java
hadoop/common/branches/branch-0.20-security-204/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestFileUtil.java
Modified: hadoop/common/branches/branch-0.20-security-204/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/CHANGES.txt?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.20-security-204/CHANGES.txt Sun Jun 19 05:39:41 2011
@@ -142,6 +142,10 @@ Release 0.20.204.0 - unreleased
HADOOP-7398. Suppress warnings about use of HADOOP_HOME. (omalley)
+ HDFS-2023. Backport of NPE for File.list and File.listFiles.
+ Merged ports of HADOOP-7322, HDFS-1934, HADOOP-7342, and HDFS-2019.
+ (Bharath Mundlapudi via mattf)
+
MAPREDUCE-2415. Distribute the user task logs on to multiple disks.
(Bharath Mundlapudi via omalley)
Propchange: hadoop/common/branches/branch-0.20-security-204/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Sun Jun 19 05:39:41 2011
@@ -1,5 +1,5 @@
/hadoop/common/branches/branch-0.20/CHANGES.txt:826138,826568,829987,831184,833001,880632,898713,909245,909723,960946,1044225
-/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1097202,1098837,1100336,1101315,1101629,1101729,1102378,1102869,1103940,1125139,1125170,1125587,1125589,1127362,1130409,1131277,1131286,1131290,1131299,1131737,1134140
+/hadoop/common/branches/branch-0.20-security/CHANGES.txt:1097202,1098837,1100336,1101315,1101629,1101729,1102378,1102869,1103940,1125139,1125170,1125587,1125589,1127362,1130409,1131277,1131286,1131290,1131299,1131461,1131737,1134140
/hadoop/common/branches/branch-0.20-security-203/CHANGES.txt:1096071,1097012-1099333,1102071,1128115
/hadoop/common/branches/branch-0.20-security-205/CHANGES.txt:1132788,1133133,1133274,1133282,1133475
/hadoop/core/branches/branch-0.18/CHANGES.txt:727226
Modified: hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/FileUtil.java?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/FileUtil.java Sun Jun 19 05:39:41 2011
@@ -288,7 +288,7 @@ public class FileUtil {
if (!dstFS.mkdirs(dst)) {
return false;
}
- File contents[] = src.listFiles();
+ File contents[] = listFiles(src);
for (int i = 0; i < contents.length; i++) {
copy(contents[i], dstFS, new Path(dst, contents[i].getName()),
deleteSource, conf);
@@ -444,8 +444,10 @@ public class FileUtil {
} else {
size = dir.length();
File[] allFiles = dir.listFiles();
- for (int i = 0; i < allFiles.length; i++) {
- size = size + getDU(allFiles[i]);
+ if(allFiles != null) {
+ for (int i = 0; i < allFiles.length; i++) {
+ size = size + getDU(allFiles[i]);
+ }
}
return size;
}
@@ -653,4 +655,42 @@ public class FileUtil {
}
}
}
+
+ /**
+ * A wrapper for {@link File#listFiles()}. This java.io API returns null
+ * when a dir is not a directory or for any I/O error. Instead of having
+ * null check everywhere File#listFiles() is used, we will add utility API
+ * to get around this problem, since for the majority of cases we prefer
+ * an IOException to be thrown.
+ * @param dir directory for which listing should be performed
+ * @return list of files or empty list
+ * @exception IOException for invalid directory or for a bad disk.
+ */
+ public static File[] listFiles(File dir) throws IOException {
+ File[] files = dir.listFiles();
+ if(files == null) {
+ throw new IOException("Invalid directory or I/O error occurred for dir: "
+ + dir.toString());
+ }
+ return files;
+ }
+
+ /**
+ * A wrapper for {@link File#list()}. This java.io API returns null
+ * when a dir is not a directory or for any I/O error. Instead of having
+ * null check everywhere File#list() is used, we will add utility API
+ * to get around this problem, since for the majority of cases we prefer
+ * an IOException to be thrown.
+ * @param dir directory for which listing should be performed
+ * @return list of file names or empty string list
+ * @exception IOException for invalid directory or for a bad disk.
+ */
+ public static String[] list(File dir) throws IOException {
+ String[] fileNames = dir.list();
+ if(fileNames == null) {
+ throw new IOException("Invalid directory or I/O error occurred for dir: "
+ + dir.toString());
+ }
+ return fileNames;
+ }
}
Modified: hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/fs/RawLocalFileSystem.java Sun Jun 19 05:39:41 2011
@@ -271,7 +271,7 @@ public class RawLocalFileSystem extends
if (f.isFile()) {
return f.delete();
} else if ((!recursive) && f.isDirectory() &&
- (f.listFiles().length != 0)) {
+ (FileUtil.listFiles(f).length != 0)) {
throw new IOException("Directory " + f.toString() + " is not empty");
}
return FileUtil.fullyDelete(f);
Modified: hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java (original)
+++ hadoop/common/branches/branch-0.20-security-204/src/core/org/apache/hadoop/util/ProcfsBasedProcessTree.java Sun Jun 19 05:39:41 2011
@@ -33,6 +33,7 @@ import java.util.LinkedList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileUtil;
/**
* A Proc file-system based ProcessTree. Works only on Linux.
@@ -268,16 +269,18 @@ public class ProcfsBasedProcessTree exte
String[] processDirs = (new File(procfsDir)).list();
List<Integer> processList = new ArrayList<Integer>();
- for (String dir : processDirs) {
- try {
- int pd = Integer.parseInt(dir);
- if ((new File(procfsDir, dir)).isDirectory()) {
- processList.add(Integer.valueOf(pd));
+ if (processDirs != null) {
+ for (String dir : processDirs) {
+ try {
+ int pd = Integer.parseInt(dir);
+ if ((new File(procfsDir, dir)).isDirectory()) {
+ processList.add(Integer.valueOf(pd));
+ }
+ } catch (NumberFormatException n) {
+ // skip this directory
+ } catch (SecurityException s) {
+ // skip this process
}
- } catch (NumberFormatException n) {
- // skip this directory
- } catch (SecurityException s) {
- // skip this process
}
}
return processList;
Modified: hadoop/common/branches/branch-0.20-security-204/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java (original)
+++ hadoop/common/branches/branch-0.20-security-204/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java Sun Jun 19 05:39:41 2011
@@ -82,7 +82,7 @@ public class FSDataset implements FSCons
dir.toString());
}
} else {
- File[] files = dir.listFiles();
+ File[] files = FileUtil.listFiles(dir);
int numChildren = 0;
for (int idx = 0; idx < files.length; idx++) {
if (files[idx].isDirectory()) {
@@ -202,10 +202,14 @@ public class FSDataset implements FSCons
}
File blockFiles[] = dir.listFiles();
- for (int i = 0; i < blockFiles.length; i++) {
- if (Block.isBlockFilename(blockFiles[i])) {
- long genStamp = getGenerationStampFromFile(blockFiles, blockFiles[i]);
- blockSet.add(new Block(blockFiles[i], blockFiles[i].length(), genStamp));
+ if (blockFiles != null) {
+ for (int i = 0; i < blockFiles.length; i++) {
+ if (Block.isBlockFilename(blockFiles[i])) {
+ long genStamp = getGenerationStampFromFile(blockFiles,
+ blockFiles[i]);
+ blockSet.add(new Block(blockFiles[i], blockFiles[i].length(),
+ genStamp));
+ }
}
}
}
@@ -218,11 +222,14 @@ public class FSDataset implements FSCons
}
File blockFiles[] = dir.listFiles();
- for (int i = 0; i < blockFiles.length; i++) {
- if (Block.isBlockFilename(blockFiles[i])) {
- long genStamp = getGenerationStampFromFile(blockFiles, blockFiles[i]);
- volumeMap.put(new Block(blockFiles[i], blockFiles[i].length(), genStamp),
- new DatanodeBlockInfo(volume, blockFiles[i]));
+ if (blockFiles != null) {
+ for (int i = 0; i < blockFiles.length; i++) {
+ if (Block.isBlockFilename(blockFiles[i])) {
+ long genStamp = getGenerationStampFromFile(blockFiles,
+ blockFiles[i]);
+ volumeMap.put(new Block(blockFiles[i], blockFiles[i].length(),
+ genStamp), new DatanodeBlockInfo(volume, blockFiles[i]));
+ }
}
}
}
@@ -469,10 +476,7 @@ public class FSDataset implements FSCons
*/
private void recoverDetachedBlocks(File dataDir, File dir)
throws IOException {
- File contents[] = dir.listFiles();
- if (contents == null) {
- return;
- }
+ File contents[] = FileUtil.listFiles(dir);
for (int i = 0; i < contents.length; i++) {
if (!contents[i].isFile()) {
throw new IOException ("Found " + contents[i] + " in " + dir +
Modified: hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestFileUtil.java?rev=1137291&r1=1137290&r2=1137291&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/branch-0.20-security-204/src/test/org/apache/hadoop/fs/TestFileUtil.java Sun Jun 19 05:39:41 2011
@@ -249,4 +249,58 @@ public class TestFileUtil {
boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
validateAndSetWritablePermissions(ret);
}
+
+ @Test
+ public void testListFiles() throws IOException {
+ setupDirs();
+ //Test existing files case
+ File[] files = FileUtil.listFiles(tmp);
+ Assert.assertEquals(1, files.length);
+
+ //Test existing directory with no files case
+ File newDir = new File(tmp.getPath(),"test");
+ newDir.mkdir();
+ Assert.assertTrue("Failed to create test dir", newDir.exists());
+ files = FileUtil.listFiles(newDir);
+ Assert.assertEquals(0, files.length);
+ newDir.delete();
+ Assert.assertFalse("Failed to delete test dir", newDir.exists());
+
+ //Test non-existing directory case, this throws
+ //IOException
+ try {
+ files = FileUtil.listFiles(newDir);
+ Assert.fail("IOException expected on listFiles() for non-existent dir "
+ + newDir.toString());
+ } catch(IOException ioe) {
+ //Expected an IOException
+ }
+ }
+
+ @Test
+ public void testListAPI() throws IOException {
+ setupDirs();
+ //Test existing files case
+ String[] files = FileUtil.list(tmp);
+ Assert.assertEquals(1, files.length);
+
+ //Test existing directory with no files case
+ File newDir = new File(tmp.getPath(),"test");
+ newDir.mkdir();
+ Assert.assertTrue("Failed to create test dir", newDir.exists());
+ files = FileUtil.list(newDir);
+ Assert.assertEquals(0, files.length);
+ newDir.delete();
+ Assert.assertFalse("Failed to delete test dir", newDir.exists());
+
+ //Test non-existing directory case, this throws
+ //IOException
+ try {
+ files = FileUtil.list(newDir);
+ Assert.fail("IOException expected on list() for non-existent dir "
+ + newDir.toString());
+ } catch(IOException ioe) {
+ //Expected an IOException
+ }
+ }
}