You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by wa...@apache.org on 2015/09/11 19:20:25 UTC

[04/42] hadoop git commit: HADOOP-12358. Add -safely flag to rm to prompt when deleting many files. Contributed by Xiaoyu Yao.

HADOOP-12358. Add -safely flag to rm to prompt when deleting many files. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e1feaf6d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e1feaf6d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e1feaf6d

Branch: refs/heads/YARN-1197
Commit: e1feaf6db03451068c660a863926032b35a569f8
Parents: 30db1ad
Author: Andrew Wang <wa...@apache.org>
Authored: Fri Sep 4 13:42:55 2015 -0700
Committer: Andrew Wang <wa...@apache.org>
Committed: Fri Sep 4 13:42:55 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 .../fs/CommonConfigurationKeysPublic.java       |  6 ++
 .../java/org/apache/hadoop/fs/shell/Delete.java | 61 ++++++++++---
 .../src/main/resources/core-default.xml         | 11 +++
 .../java/org/apache/hadoop/cli/TestCLI.java     |  2 +-
 .../org/apache/hadoop/cli/util/CLICommand.java  |  5 +-
 .../org/apache/hadoop/cli/util/CLITestCmd.java  |  6 +-
 .../src/test/resources/testConf.xml             |  6 +-
 .../org/apache/hadoop/cli/CLITestCmdDFS.java    |  8 +-
 .../java/org/apache/hadoop/cli/TestAclCLI.java  |  2 +-
 .../apache/hadoop/cli/TestCacheAdminCLI.java    |  7 +-
 .../apache/hadoop/cli/TestCryptoAdminCLI.java   |  6 +-
 .../org/apache/hadoop/cli/TestDeleteCLI.java    | 92 ++++++++++++++++++++
 .../java/org/apache/hadoop/cli/TestHDFSCLI.java |  4 +-
 .../org/apache/hadoop/cli/TestXAttrCLI.java     |  2 +-
 .../server/namenode/TestStorageRestore.java     |  3 +-
 .../src/test/resources/testDeleteConf.xml       | 83 ++++++++++++++++++
 .../org/apache/hadoop/cli/CLITestCmdMR.java     |  3 +-
 18 files changed, 275 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 512ca1b..5118747 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -765,6 +765,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12369. Point hadoop-project/pom.xml java.security.krb5.conf
     within target folder. (wang)
 
+    HADOOP-12358. Add -safely flag to rm to prompt when deleting many files.
+    (xyao via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index f3bc2e1..9f053b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -381,5 +381,11 @@ public class CommonConfigurationKeysPublic {
       "hadoop.shell.missing.defaultFs.warning";
   public static final boolean HADOOP_SHELL_MISSING_DEFAULT_FS_WARNING_DEFAULT =
       false;
+
+  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+  public static final String HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES =
+      "hadoop.shell.safely.delete.limit.num.files";
+  public static final long HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT =
+      100;
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 40d9478..ec45d17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -25,6 +25,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.PathIsDirectoryException;
@@ -32,9 +33,13 @@ import org.apache.hadoop.fs.PathIsNotDirectoryException;
 import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;
 import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.fs.Trash;
+import org.apache.hadoop.util.ToolRunner;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT;
 
 /**
- * Classes that delete paths
+ * Classes that delete paths.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
@@ -50,28 +55,36 @@ class Delete {
   /** remove non-directory paths */
   public static class Rm extends FsCommand {
     public static final String NAME = "rm";
-    public static final String USAGE = "[-f] [-r|-R] [-skipTrash] <src> ...";
+    public static final String USAGE = "[-f] [-r|-R] [-skipTrash] " +
+        "[-safely] <src> ...";
     public static final String DESCRIPTION =
-      "Delete all files that match the specified file pattern. " +
-      "Equivalent to the Unix command \"rm <src>\"\n" +
-      "-skipTrash: option bypasses trash, if enabled, and immediately " +
-      "deletes <src>\n" +
-      "-f: If the file does not exist, do not display a diagnostic " +
-      "message or modify the exit status to reflect an error.\n" +
-      "-[rR]:  Recursively deletes directories";
+        "Delete all files that match the specified file pattern. " +
+            "Equivalent to the Unix command \"rm <src>\"\n" +
+            "-f: If the file does not exist, do not display a diagnostic " +
+            "message or modify the exit status to reflect an error.\n" +
+            "-[rR]:  Recursively deletes directories.\n" +
+            "-skipTrash: option bypasses trash, if enabled, and immediately " +
+            "deletes <src>.\n" +
+            "-safely: option requires safety confirmation,if enabled, " +
+            "requires confirmation before deleting large directory with more " +
+            "than <hadoop.shell.delete.limit.num.files> files. Delay is " +
+            "expected when walking over large directory recursively to count " +
+            "the number of files to be deleted before the confirmation.\n";
 
     private boolean skipTrash = false;
     private boolean deleteDirs = false;
     private boolean ignoreFNF = false;
-    
+    private boolean safeDelete = false;
+
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
       CommandFormat cf = new CommandFormat(
-          1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash");
+          1, Integer.MAX_VALUE, "f", "r", "R", "skipTrash", "safely");
       cf.parse(args);
       ignoreFNF = cf.getOpt("f");
       deleteDirs = cf.getOpt("r") || cf.getOpt("R");
       skipTrash = cf.getOpt("skipTrash");
+      safeDelete = cf.getOpt("safely");
     }
 
     @Override
@@ -102,7 +115,7 @@ class Delete {
       // problem (ie. creating the trash dir, moving the item to be deleted,
       // etc), then the path will just be deleted because moveToTrash returns
       // false and it falls thru to fs.delete.  this doesn't seem right
-      if (moveToTrash(item)) {
+      if (moveToTrash(item) || !canBeSafelyDeleted(item)) {
         return;
       }
       if (!item.fs.delete(item.path, deleteDirs)) {
@@ -111,6 +124,28 @@ class Delete {
       out.println("Deleted " + item);
     }
 
+    private boolean canBeSafelyDeleted(PathData item)
+        throws IOException {
+      boolean shouldDelete = true;
+      if (safeDelete) {
+        final long deleteLimit = getConf().getLong(
+            HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES,
+            HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES_DEFAULT);
+        if (deleteLimit > 0) {
+          ContentSummary cs = item.fs.getContentSummary(item.path);
+          final long numFiles = cs.getFileCount();
+          if (numFiles > deleteLimit) {
+            if (!ToolRunner.confirmPrompt("Proceed deleting " + numFiles +
+                " files?")) {
+              System.err.println("Delete aborted at user request.\n");
+              shouldDelete = false;
+            }
+          }
+        }
+      }
+      return shouldDelete;
+    }
+
     private boolean moveToTrash(PathData item) throws IOException {
       boolean success = false;
       if (!skipTrash) {
@@ -122,7 +157,7 @@ class Delete {
           String msg = ioe.getMessage();
           if (ioe.getCause() != null) {
             msg += ": " + ioe.getCause().getMessage();
-	  }
+          }
           throw new IOException(msg + ". Consider using -skipTrash option", ioe);
         }
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index b813aa9..410d966 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -1962,4 +1962,15 @@ for ldap providers in the same way as above does.
     <name>hadoop.shell.missing.defaultFs.warning</name>
     <value>false</value>
   </property>
+
+  <property>
+    <name>hadoop.shell.safely.delete.limit.num.files</name>
+    <value>100</value>
+    <description>Used by -safely option of hadoop fs shell -rm command to avoid
+      accidental deletion of large directories. When enabled, the -rm command
+      requires confirmation if the number of files to be deleted is greater than
+      this limit. The default limit is 100 files. The warning is disabled if
+      the limit is 0 or the -safely option is not specified in the -rm command.
+    </description>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
index c350388..e1514ff 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/TestCLI.java
@@ -42,7 +42,7 @@ public class TestCLI extends CLITestHelper {
 
   @Override
   protected CommandExecutor.Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor("").executeCommand(cmd.getCmd());
+    return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
 
   }
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
index 50cb3a5..8823f5c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLICommand.java
@@ -17,11 +17,14 @@
  */
 package org.apache.hadoop.cli.util;
 
+import org.apache.hadoop.conf.Configuration;
+
 /**
  * This interface is to generalize types of test command for upstream projects
  */
 public interface CLICommand {
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException;
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException;
   public CLICommandTypes getType();
   public String getCmd();
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
index 602a07f..d912fad 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CLITestCmd.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.cli.util;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 
 /**
@@ -32,9 +33,10 @@ public class CLITestCmd implements CLICommand {
   }
 
   @Override
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException {
     if (getType() instanceof CLICommandFS)
-      return new FSCmdExecutor(tag, new FsShell());
+      return new FSCmdExecutor(tag, new FsShell(conf));
     throw new
         IllegalArgumentException("Unknown type of test command: " + getType());
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
index 62afff2..aa9ee76 100644
--- a/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
+++ b/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
@@ -391,7 +391,7 @@
       <comparators>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] &lt;src&gt; \.\.\. :\s*</expected-output>
+          <expected-output>^-rm \[-f\] \[-r\|-R\] \[-skipTrash\] \[-safely\] &lt;src&gt; \.\.\. :\s*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -403,7 +403,7 @@
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;( )*</expected-output>
+          <expected-output>^\s*-skipTrash\s+option bypasses trash, if enabled, and immediately deletes &lt;src&gt;\.( )*</expected-output>
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
@@ -415,7 +415,7 @@
         </comparator>
         <comparator>
           <type>RegexpComparator</type>
-          <expected-output>^\s+-\[rR\]\s+Recursively deletes directories\s*</expected-output>
+          <expected-output>^\s+-\[rR\]\s+Recursively deletes directories\.\s*</expected-output>
         </comparator>
       </comparators>
     </test>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
index 89932cc..992e8fe 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/CLITestCmdDFS.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.cli.util.CLICommandTypes;
 import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CommandExecutor;
 import org.apache.hadoop.cli.util.FSCmdExecutor;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 
 public class CLITestCmdDFS extends CLITestCmd {
@@ -30,9 +31,10 @@ public class CLITestCmdDFS extends CLITestCmd {
   }
 
   @Override
-  public CommandExecutor getExecutor(String tag) throws IllegalArgumentException {
+  public CommandExecutor getExecutor(String tag, Configuration conf)
+      throws IllegalArgumentException {
     if (getType() instanceof CLICommandDFSAdmin)
-      return new FSCmdExecutor(tag, new DFSAdmin());
-    return super.getExecutor(tag);
+      return new FSCmdExecutor(tag, new DFSAdmin(conf));
+    return super.getExecutor(tag, conf);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
index 02207e6..a6f8651 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestAclCLI.java
@@ -73,7 +73,7 @@ public class TestAclCLI extends CLITestHelperDFS {
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
index f25c4fe..ddb11b6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCacheAdminCLI.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CacheAdminCmdExecutor;
 import org.apache.hadoop.cli.util.CommandExecutor;
 import org.apache.hadoop.cli.util.CommandExecutor.Result;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
@@ -119,18 +120,18 @@ public class TestCacheAdminCLI extends CLITestHelper {
     }
 
     @Override
-    public CommandExecutor getExecutor(String tag)
+    public CommandExecutor getExecutor(String tag, Configuration conf)
         throws IllegalArgumentException {
       if (getType() instanceof CLICommandCacheAdmin) {
         return new CacheAdminCmdExecutor(tag, new CacheAdmin(conf));
       }
-      return super.getExecutor(tag);
+      return super.getExecutor(tag, conf);
     }
   }
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor("").executeCommand(cmd.getCmd());
+    return cmd.getExecutor("", conf).executeCommand(cmd.getCmd());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c870a2..44e662f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -149,18 +149,18 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
     }
 
     @Override
-    public CommandExecutor getExecutor(String tag)
+    public CommandExecutor getExecutor(String tag, Configuration conf)
         throws IllegalArgumentException {
       if (getType() instanceof CLICommandCryptoAdmin) {
         return new CryptoAdminCmdExecutor(tag, new CryptoAdmin(conf));
       }
-      return super.getExecutor(tag);
+      return super.getExecutor(tag, conf);
     }
   }
 
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
new file mode 100644
index 0000000..04328f5
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestDeleteCLI.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.cli;
+
+import static org.junit.Assert.assertTrue;
+
+import org.apache.hadoop.cli.util.CLICommand;
+import org.apache.hadoop.cli.util.CommandExecutor.Result;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestDeleteCLI extends CLITestHelperDFS {
+  protected MiniDFSCluster dfsCluster = null;
+  protected FileSystem fs = null;
+  protected String namenode = null;
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+    conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
+    conf.setLong(CommonConfigurationKeysPublic.
+        HADOOP_SHELL_SAFELY_DELETE_LIMIT_NUM_FILES, 5);
+
+    dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
+    dfsCluster.waitClusterUp();
+    namenode = conf.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
+
+    fs = dfsCluster.getFileSystem();
+    assertTrue("Not an HDFS: " + fs.getUri(),
+        fs instanceof DistributedFileSystem);
+  }
+
+  @After
+  @Override
+  public void tearDown() throws Exception {
+    if (fs != null) {
+      fs.close();
+    }
+    if (dfsCluster != null) {
+      dfsCluster.shutdown();
+    }
+    Thread.sleep(2000);
+    super.tearDown();
+  }
+
+  @Override
+  protected String getTestFile() {
+    return "testDeleteConf.xml";
+  }
+
+  @Override
+  protected String expandCommand(final String cmd) {
+    String expCmd = cmd;
+    expCmd = expCmd.replaceAll("NAMENODE", namenode);
+    expCmd = super.expandCommand(expCmd);
+    return expCmd;
+  }
+
+  @Override
+  protected Result execute(CLICommand cmd) throws Exception {
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
+  }
+
+  @Test
+  @Override
+  public void testAll () {
+    super.testAll();
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
index c348348..3630726 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestHDFSCLI.java
@@ -47,7 +47,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
     
     // Many of the tests expect a replication value of 1 in the output
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
-    
+
     // Build racks and hosts configuration to test dfsAdmin -printTopology
     String [] racks =  {"/rack1", "/rack1", "/rack2", "/rack2",
                         "/rack2", "/rack3", "/rack4", "/rack4" };
@@ -95,7 +95,7 @@ public class TestHDFSCLI extends CLITestHelperDFS {
   
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
index ce107ef..45c7909 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestXAttrCLI.java
@@ -87,7 +87,7 @@ public class TestXAttrCLI  extends CLITestHelperDFS {
   
   @Override
   protected Result execute(CLICommand cmd) throws Exception {
-    return cmd.getExecutor(namenode).executeCommand(cmd.getCmd());
+    return cmd.getExecutor(namenode, conf).executeCommand(cmd.getCmd());
   }
   
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
index 1a612e8..6f4546d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java
@@ -275,7 +275,8 @@ public class TestStorageRestore {
       String cmd = "-fs NAMENODE -restoreFailedStorage false";
       String namenode = config.get(DFSConfigKeys.FS_DEFAULT_NAME_KEY, "file:///");
       CommandExecutor executor =
-          new CLITestCmdDFS(cmd, new CLICommandDFSAdmin()).getExecutor(namenode);
+          new CLITestCmdDFS(cmd,
+              new CLICommandDFSAdmin()).getExecutor(namenode, config);
 
       executor.executeCommand(cmd);
       restore = fsi.getStorage().getRestoreFailedStorage();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml
new file mode 100644
index 0000000..8701983
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testDeleteConf.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="testConf.xsl"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<configuration>
+    <!-- Normal mode is test. To run just the commands and dump the output
+         to the log, set it to nocompare -->
+    <mode>test</mode>
+
+    <!--  Comparator types:
+             ExactComparator
+             SubstringComparator
+             RegexpComparator
+             TokenComparator
+             -->
+    <tests>
+        <test>  <!-- TESTED -->
+            <description>rm -r directory that meet warning criteria when -safely is not used</description>
+            <test-commands>
+                <command>-fs NAMENODE -mkdir /dir0</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data120bytes /dir0/data120bytes</command>
+                <command>-fs NAMENODE -mkdir /dir0/dir00</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
+                <command>-fs NAMENODE -mkdir /dir0/dir01</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
+                <command>-fs NAMENODE -ls /dir0</command>
+                <command>-fs NAMENODE -rm -r /dir0</command>
+            </test-commands>
+            <cleanup-commands>
+                <command>-fs NAMENODE -rm -r /dir0</command>
+            </cleanup-commands>
+            <comparators>
+                <comparator>
+                    <type>RegexpComparator</type>
+                    <expected-output>Deleted /dir0</expected-output>
+                </comparator>
+            </comparators>
+        </test>
+        <test>  <!-- TESTED -->
+            <description>rm -r directory that does not meet warning criteria when -safely is used</description>
+            <test-commands>
+                <command>-fs NAMENODE -mkdir /dir0</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/data15bytes</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/data30bytes</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data60bytes /dir0/data60bytes</command>
+                <command>-fs NAMENODE -mkdir /dir0/dir00</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data15bytes /dir0/dir00/data15bytes</command>
+                <command>-fs NAMENODE -mkdir /dir0/dir01</command>
+                <command>-fs NAMENODE -copyFromLocal CLITEST_DATA/data30bytes /dir0/dir01/data30bytes</command>
+                <command>-fs NAMENODE -ls /dir0</command>
+                <command>-fs NAMENODE -rm -r -safely /dir0</command>
+            </test-commands>
+            <cleanup-commands>
+                <command>-fs NAMENODE -rm -r /dir0</command>
+            </cleanup-commands>
+            <comparators>
+                <comparator>
+                    <type>RegexpComparator</type>
+                    <expected-output>Deleted /dir0</expected-output>
+                </comparator>
+            </comparators>
+        </test>
+    </tests>
+</configuration>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e1feaf6d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
index f9bc943..f4ab3dc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/cli/CLITestCmdMR.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.cli;
 import org.apache.hadoop.cli.util.CLICommandTypes;
 import org.apache.hadoop.cli.util.CLITestCmd;
 import org.apache.hadoop.cli.util.CommandExecutor;
+import org.apache.hadoop.conf.Configuration;
 
 public class CLITestCmdMR extends CLITestCmd {
   public CLITestCmdMR(String str, CLICommandTypes type) {
@@ -34,7 +35,7 @@ public class CLITestCmdMR extends CLITestCmd {
    * of the test method.
    */
   @Override
-  public CommandExecutor getExecutor(String tag)
+  public CommandExecutor getExecutor(String tag, Configuration conf)
       throws IllegalArgumentException {
     throw new IllegalArgumentException("Method isn't supported");
   }