Posted to common-commits@hadoop.apache.org by zj...@apache.org on 2015/06/08 19:19:36 UTC
[38/50] hadoop git commit: HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect errors when listing a directory. Contributed by Zhihai Xu.
HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect errors when listing a directory. Contributed by Zhihai Xu.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/01cd698b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/01cd698b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/01cd698b
Branch: refs/heads/YARN-2928
Commit: 01cd698bd5f21d01a654f7c963da6bf46e2b0005
Parents: ddd92aa
Author: Andrew Wang <wa...@apache.org>
Authored: Fri Jun 5 13:52:21 2015 -0700
Committer: Zhijie Shen <zj...@apache.org>
Committed: Mon Jun 8 09:57:00 2015 -0700
----------------------------------------------------------------------
hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
.../org/apache/hadoop/util/DiskChecker.java | 24 ++++++++++++++++----
.../org/apache/hadoop/util/TestDiskChecker.java | 22 ++++++++++++++++++
3 files changed, 45 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
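Background on the change: File#listFiles returns null when a directory
cannot be listed (for example, on an I/O error), so the old loop in
DiskChecker#checkDirs would fail with an unexplained NullPointerException
rather than reporting the underlying problem. java.nio.file.Files#
newDirectoryStream reports the same failure as an IOException that carries
the real cause. A minimal standalone sketch, not part of the commit,
contrasting the two APIs:

----------------------------------------------------------------------
// Sketch only: contrasts the silent failure mode of File.listFiles()
// with the checked IOException raised by Files.newDirectoryStream().
import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class ListingContrast {
  public static void main(String[] args) {
    File dir = new File(args.length > 0 ? args[0] : ".");

    // Legacy API: a null return conflates "I/O error" with "not a
    // directory" and offers no diagnostic at all.
    File[] children = dir.listFiles();
    if (children == null) {
      System.err.println("listFiles() failed, but gave no cause");
    }

    // NIO API: the failure arrives as an exception with the cause attached.
    try (DirectoryStream<Path> stream =
        Files.newDirectoryStream(dir.toPath())) {
      for (Path entry : stream) {
        System.out.println(entry.getFileName());
      }
    } catch (IOException e) {
      System.err.println("listing failed: " + e);
    }
  }
}
----------------------------------------------------------------------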
http://git-wip-us.apache.org/repos/asf/hadoop/blob/01cd698b/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 51579da..4b1d0d1 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -637,6 +637,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12059. S3Credentials should support use of CredentialProvider.
     (Sean Busbey via wang)
 
+    HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect
+    errors when listing a directory. (Zhihai Xu via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
http://git-wip-us.apache.org/repos/asf/hadoop/blob/01cd698b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index 6b27ae5..a36a7a0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.util;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.DirectoryIteratorException;
+import java.nio.file.Files;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -86,13 +89,26 @@ public class DiskChecker {
    */
   public static void checkDirs(File dir) throws DiskErrorException {
     checkDir(dir);
-    for (File child : dir.listFiles()) {
-      if (child.isDirectory()) {
-        checkDirs(child);
+    IOException ex = null;
+    try (DirectoryStream<java.nio.file.Path> stream =
+        Files.newDirectoryStream(dir.toPath())) {
+      for (java.nio.file.Path entry: stream) {
+        File child = entry.toFile();
+        if (child.isDirectory()) {
+          checkDirs(child);
+        }
       }
+    } catch (DirectoryIteratorException de) {
+      ex = de.getCause();
+    } catch (IOException ie) {
+      ex = ie;
+    }
+    if (ex != null) {
+      throw new DiskErrorException("I/O error when open a directory: "
+          + dir.toString(), ex);
     }
   }
-  
+
   /**
    * Create the directory if it doesn't exist and check that dir is readable,
    * writable and executable
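A note on the DirectoryIteratorException branch above: DirectoryStream's
iterator cannot throw checked exceptions, so a read error hit during
iteration is delivered wrapped in the unchecked DirectoryIteratorException,
whose getCause() returns the original IOException; the catch block unwraps
it so both failure paths funnel into the same DiskErrorException. A
hypothetical caller sketch, not from the patch, showing what the new
behavior looks like from the outside:

----------------------------------------------------------------------
// Sketch only: with this change, a listing failure surfaces as a
// DiskErrorException whose cause is the underlying IOException,
// instead of a NullPointerException from a null listFiles() result.
import java.io.File;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

public class DataDirProbe {
  public static void main(String[] args) {
    String dirName = args.length > 0 ? args[0] : ".";
    try {
      DiskChecker.checkDirs(new File(dirName));
      System.out.println("ok: " + dirName);
    } catch (DiskErrorException e) {
      System.err.println("bad dir: " + e.getMessage()
          + ", cause: " + e.getCause());
    }
  }
}
----------------------------------------------------------------------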
http://git-wip-us.apache.org/repos/asf/hadoop/blob/01cd698b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index 5ab1313..de54735 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;
import org.apache.hadoop.util.Shell;
@@ -180,4 +181,25 @@ public class TestDiskChecker {
     System.out.println("checkDir success: " + success);
   }
+
+  @Test (timeout = 30000)
+  public void testCheckDirsIOException() throws Throwable {
+    Path path = new Path("target", TestDiskChecker.class.getSimpleName());
+    File localDir = new File(path.toUri().getRawPath());
+    localDir.mkdir();
+    File localFile = new File(localDir, "test");
+    localFile.createNewFile();
+    File spyLocalDir = spy(localDir);
+    doReturn(localFile.toPath()).when(spyLocalDir).toPath();
+    try {
+      DiskChecker.checkDirs(spyLocalDir);
+      fail("Expected exception for I/O error");
+    } catch (DiskErrorException e) {
+      GenericTestUtils.assertExceptionContains("I/O error", e);
+      assertTrue(e.getCause() instanceof IOException);
+    } finally {
+      localFile.delete();
+      localDir.delete();
+    }
+  }
 }
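How the test forces the error path: the Mockito spy redirects
spyLocalDir.toPath() to the path of a regular file while delegating every
other call to the real directory, so checkDir(dir) passes but
Files.newDirectoryStream() inside checkDirs fails immediately. On the
default filesystem provider that failure is a NotDirectoryException, an
IOException subclass, which checkDirs then wraps in the expected
DiskErrorException. A standalone sketch, not from the patch, of that same
failure:

----------------------------------------------------------------------
// Sketch only: opening a directory stream on a regular file fails with
// NotDirectoryException (an IOException) on the default provider.
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NotDirectoryException;
import java.nio.file.Path;

public class NotDirDemo {
  public static void main(String[] args) throws IOException {
    Path file = Files.createTempFile("diskchecker", ".tmp");
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(file)) {
      System.out.println("unexpectedly opened " + file);
    } catch (NotDirectoryException e) {
      // This is the IOException the test expects as the cause.
      System.out.println("as expected: " + e);
    } finally {
      Files.delete(file);
    }
  }
}
----------------------------------------------------------------------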