You are viewing a plain-text version of this content; the canonical (HTML) version, including the original link, is available from the Apache mailing-list archive for common-commits@hadoop.apache.org.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2018/01/26 21:09:40 UTC
[1/3] hadoop git commit: HDFS-13054. Handling
PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by
Nanda kumar.
Repository: hadoop
Updated Branches:
refs/heads/branch-2 9bd439e2c -> 7be5a46a4
refs/heads/branch-3.0 7f3548778 -> 480b69c03
refs/heads/trunk a37e7f0ad -> e990904dd
HDFS-13054. Handling PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by Nanda kumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/480b69c0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/480b69c0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/480b69c0
Branch: refs/heads/branch-3.0
Commit: 480b69c03c9c2e13dd08e9b5e89f9d3ca9ab5d7c
Parents: 7f35487
Author: Arpit Agarwal <ar...@apache.org>
Authored: Fri Jan 26 11:42:27 2018 -0800
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Fri Jan 26 11:45:23 2018 -0800
----------------------------------------------------------------------
.../java/org/apache/hadoop/hdfs/DFSClient.java | 4 +++-
.../hadoop/hdfs/protocol/ClientProtocol.java | 3 +++
.../hadoop/hdfs/TestDistributedFileSystem.java | 17 +++++++++++++++++
3 files changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/480b69c0/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index c20e2a0..e08d403 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -82,6 +82,7 @@ import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
@@ -1588,7 +1589,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
FileNotFoundException.class,
SafeModeException.class,
UnresolvedPathException.class,
- SnapshotAccessControlException.class);
+ SnapshotAccessControlException.class,
+ PathIsNotEmptyDirectoryException.class);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/480b69c0/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index 24f0321..200642c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.hdfs.AddBlockFlag;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.ContentSummary;
@@ -625,6 +626,8 @@ public interface ClientProtocol {
* @throws org.apache.hadoop.fs.UnresolvedLinkException If <code>src</code>
* contains a symlink
* @throws SnapshotAccessControlException if path is in RO snapshot
+ * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory
+ * and <code>recursive</code> is set to false
* @throws IOException If an I/O error occurred
*/
@AtMostOnce
http://git-wip-us.apache.org/repos/asf/hadoop/blob/480b69c0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 02dbdae..288a17f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
import org.apache.hadoop.fs.StorageType;
@@ -571,6 +572,22 @@ public class TestDistributedFileSystem {
in.close();
fs.close();
}
+
+ {
+ // Test PathIsNotEmptyDirectoryException while deleting non-empty dir
+ FileSystem fs = cluster.getFileSystem();
+ fs.mkdirs(new Path("/test/nonEmptyDir"));
+ fs.create(new Path("/tmp/nonEmptyDir/emptyFile")).close();
+ try {
+ fs.delete(new Path("/tmp/nonEmptyDir"), false);
+ Assert.fail("Expecting PathIsNotEmptyDirectoryException");
+ } catch (PathIsNotEmptyDirectoryException ex) {
+ // This is the proper exception to catch; move on.
+ }
+ Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir")));
+ fs.delete(new Path("/tmp/nonEmptyDir"), true);
+ }
+
}
finally {
if (cluster != null) {cluster.shutdown();}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org
[2/3] hadoop git commit: HDFS-13054. Handling
PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by
Nanda kumar.
Posted by ar...@apache.org.
HDFS-13054. Handling PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by Nanda kumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7be5a46a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7be5a46a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7be5a46a
Branch: refs/heads/branch-2
Commit: 7be5a46a41d3a78d82a804aabf1933d6af3c8725
Parents: 9bd439e
Author: Arpit Agarwal <ar...@apache.org>
Authored: Fri Jan 26 11:42:27 2018 -0800
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Fri Jan 26 11:45:36 2018 -0800
----------------------------------------------------------------------
.../java/org/apache/hadoop/hdfs/DFSClient.java | 4 +++-
.../hadoop/hdfs/protocol/ClientProtocol.java | 3 +++
.../hadoop/hdfs/TestDistributedFileSystem.java | 17 +++++++++++++++++
3 files changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7be5a46a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index be4de50..3e92340 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -88,6 +88,7 @@ import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
@@ -1620,7 +1621,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
FileNotFoundException.class,
SafeModeException.class,
UnresolvedPathException.class,
- SnapshotAccessControlException.class);
+ SnapshotAccessControlException.class,
+ PathIsNotEmptyDirectoryException.class);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7be5a46a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index 587a15c..a9c8d7e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.hdfs.AddBlockFlag;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.ContentSummary;
@@ -614,6 +615,8 @@ public interface ClientProtocol {
* @throws org.apache.hadoop.fs.UnresolvedLinkException If <code>src</code>
* contains a symlink
* @throws SnapshotAccessControlException if path is in RO snapshot
+ * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory
+ * and <code>recursive</code> is set to false
* @throws IOException If an I/O error occurred
*/
@AtMostOnce
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7be5a46a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index d92a67d..0801d89 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -73,6 +73,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
import org.apache.hadoop.fs.StorageType;
@@ -437,6 +438,22 @@ public class TestDistributedFileSystem {
in.close();
fs.close();
}
+
+ {
+ // Test PathIsNotEmptyDirectoryException while deleting non-empty dir
+ FileSystem fs = cluster.getFileSystem();
+ fs.mkdirs(new Path("/test/nonEmptyDir"));
+ fs.create(new Path("/tmp/nonEmptyDir/emptyFile")).close();
+ try {
+ fs.delete(new Path("/tmp/nonEmptyDir"), false);
+ Assert.fail("Expecting PathIsNotEmptyDirectoryException");
+ } catch (PathIsNotEmptyDirectoryException ex) {
+ // This is the proper exception to catch; move on.
+ }
+ Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir")));
+ fs.delete(new Path("/tmp/nonEmptyDir"), true);
+ }
+
}
finally {
if (cluster != null) {cluster.shutdown();}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org
[3/3] hadoop git commit: HDFS-13054. Handling
PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by
Nanda kumar.
Posted by ar...@apache.org.
HDFS-13054. Handling PathIsNotEmptyDirectoryException in DFSClient delete call. Contributed by Nanda kumar.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e990904d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e990904d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e990904d
Branch: refs/heads/trunk
Commit: e990904dd568a1d8f98efb55c1dd2d598ae4752b
Parents: a37e7f0
Author: Arpit Agarwal <ar...@apache.org>
Authored: Fri Jan 26 11:42:27 2018 -0800
Committer: Arpit Agarwal <ar...@apache.org>
Committed: Fri Jan 26 13:09:13 2018 -0800
----------------------------------------------------------------------
.../java/org/apache/hadoop/hdfs/DFSClient.java | 4 +++-
.../hadoop/hdfs/protocol/ClientProtocol.java | 3 +++
.../hadoop/hdfs/TestDistributedFileSystem.java | 17 +++++++++++++++++
3 files changed, 23 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index f0769c1..92bb99e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -82,6 +82,7 @@ import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.QuotaUsage;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageType;
@@ -1620,7 +1621,8 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
FileNotFoundException.class,
SafeModeException.class,
UnresolvedPathException.class,
- SnapshotAccessControlException.class);
+ SnapshotAccessControlException.class,
+ PathIsNotEmptyDirectoryException.class);
}
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
index fbef037..0d77037 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/ClientProtocol.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.hdfs.AddBlockFlag;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.ContentSummary;
@@ -625,6 +626,8 @@ public interface ClientProtocol {
* @throws org.apache.hadoop.fs.UnresolvedLinkException If <code>src</code>
* contains a symlink
* @throws SnapshotAccessControlException if path is in RO snapshot
+ * @throws PathIsNotEmptyDirectoryException if path is a non-empty directory
+ * and <code>recursive</code> is set to false
* @throws IOException If an I/O error occurred
*/
@AtMostOnce
http://git-wip-us.apache.org/repos/asf/hadoop/blob/e990904d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index 823c747..072ee9f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -67,6 +67,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
import org.apache.hadoop.fs.StorageType;
@@ -571,6 +572,22 @@ public class TestDistributedFileSystem {
in.close();
fs.close();
}
+
+ {
+ // Test PathIsNotEmptyDirectoryException while deleting non-empty dir
+ FileSystem fs = cluster.getFileSystem();
+ fs.mkdirs(new Path("/test/nonEmptyDir"));
+ fs.create(new Path("/tmp/nonEmptyDir/emptyFile")).close();
+ try {
+ fs.delete(new Path("/tmp/nonEmptyDir"), false);
+ Assert.fail("Expecting PathIsNotEmptyDirectoryException");
+ } catch (PathIsNotEmptyDirectoryException ex) {
+ // This is the proper exception to catch; move on.
+ }
+ Assert.assertTrue(fs.exists(new Path("/test/nonEmptyDir")));
+ fs.delete(new Path("/tmp/nonEmptyDir"), true);
+ }
+
}
finally {
if (cluster != null) {cluster.shutdown();}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org