Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2013/03/12 02:58:44 UTC

svn commit: r1455390 - in /hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common: ./ dev-support/ src/main/bin/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/ipc/ src/main/java/org/apache/hadoop/util/ src/main/proto/ src/...

Author: szetszwo
Date: Tue Mar 12 01:58:43 2013
New Revision: 1455390

URL: http://svn.apache.org/r1455390
Log:
Merge r1454237 through r1455388 from trunk.

Modified:
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt Tue Mar 12 01:58:43 2013
@@ -8,6 +8,12 @@ Trunk (Unreleased)
     FSDataOutputStream.sync() and Syncable.sync().  (szetszwo)
 
     HADOOP-8886. Remove KFS support. (eli)
+    
+    HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
+    avoid an extra copy (Sanjay Radia)
+
+    HADOOP-9151 Include RPC error info in RpcResponseHeader instead of sending
+    it separately (sanjay Radia)
 
   NEW FEATURES
     
@@ -157,8 +163,6 @@ Trunk (Unreleased)
     HADOOP-9112. test-patch should -1 for @Tests without a timeout 
     (Surenkumar Nihalani via bobby)
 
-    HADOOP-9163 The rpc msg in ProtobufRpcEngine.proto should be moved out to
-    avoid an extra copy (Sanjay Radia)
 
   BUG FIXES
 
@@ -538,6 +542,9 @@ Release 2.0.5-beta - UNRELEASED
     HADOOP-9369. DNS#reverseDns() can return hostname with . appended at the
     end. (Karthik Kambatla via atm)
 
+    HADOOP-9379. capture the ulimit info after printing the log to the 
+    console. (Arpit Gupta via suresh)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1454237-1455388

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml Tue Mar 12 01:58:43 2013
@@ -308,4 +308,11 @@
        <Method name="removeRenewAction" />
        <Bug pattern="BC_UNCONFIRMED_CAST" />
      </Match>
+     
+     <!-- Inconsistent synchronization flagged by findbugs is not valid. -->
+     <Match>
+       <Class name="org.apache.hadoop.ipc.Client$Connection" />
+       <Field name="in" />
+       <Bug pattern="IS2_INCONSISTENT_SYNC" />
+     </Match>
  </FindBugsFilter>

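For context: IS2_INCONSISTENT_SYNC fires when findbugs sees a field accessed
both with and without the owning object's lock. A hypothetical minimal Java
illustration of the pattern being suppressed (the real
org.apache.hadoop.ipc.Client$Connection code is more involved):

    import java.io.DataInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class Connection {
      private DataInputStream in;            // the flagged field

      synchronized void setupIOstreams(InputStream raw) {
        in = new DataInputStream(raw);       // written while holding the lock
      }

      void receiveRpcResponse() throws IOException {
        int callId = in.readInt();           // read without the lock: findbugs
                                             // reports IS2_INCONSISTENT_SYNC
        System.out.println("got call #" + callId);
      }
    }

Per the comment in the new Match entry, the flagged inconsistency is not a
real race here, which is why the warning is excluded rather than "fixed".
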
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh Tue Mar 12 01:58:43 2013
@@ -156,6 +156,7 @@ case $startStop in
     esac
     echo $! > $pid
     sleep 1
+    head "$log"
     # capture the ulimit output
     if [ "true" = "$starting_secure_dn" ]; then
       echo "ulimit -a for secure datanode user $HADOOP_SECURE_DN_USER" >> $log
@@ -165,7 +166,6 @@ case $startStop in
       echo "ulimit -a for user $USER" >> $log
       ulimit -a >> $log 2>&1
     fi
-    head -30 "$log"
     sleep 3;
     if ! ps -p $! > /dev/null ; then
       exit 1

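Note the ordering change in this hunk: "head" now runs before the ulimit
capture, so (per HADOOP-9379 above) the start of the log is echoed to the
console first and the ulimit output is appended to the log file afterwards,
instead of showing up in the console printout. The moved command also prints
head's default 10 lines rather than the previous 30.
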
Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1454237-1455388

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1454237-1455388

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue Mar 12 01:58:43 2013
@@ -59,7 +59,6 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.io.DataOutputBuffer;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
@@ -945,31 +944,38 @@ public class Client {
       touch();
       
       try {
-        RpcResponseHeaderProto response = 
+        RpcResponseHeaderProto header = 
             RpcResponseHeaderProto.parseDelimitedFrom(in);
-        if (response == null) {
+        if (header == null) {
           throw new IOException("Response is null.");
         }
 
-        int callId = response.getCallId();
+        int callId = header.getCallId();
         if (LOG.isDebugEnabled())
           LOG.debug(getName() + " got value #" + callId);
 
         Call call = calls.get(callId);
-        RpcStatusProto status = response.getStatus();
+        RpcStatusProto status = header.getStatus();
         if (status == RpcStatusProto.SUCCESS) {
           Writable value = ReflectionUtils.newInstance(valueClass, conf);
           value.readFields(in);                 // read value
           call.setRpcResponse(value);
           calls.remove(callId);
-        } else if (status == RpcStatusProto.ERROR) {
-          call.setException(new RemoteException(WritableUtils.readString(in),
-                                                WritableUtils.readString(in)));
-          calls.remove(callId);
-        } else if (status == RpcStatusProto.FATAL) {
-          // Close the connection
-          markClosed(new RemoteException(WritableUtils.readString(in), 
-                                         WritableUtils.readString(in)));
+        } else { // Rpc Request failed
+            final String exceptionClassName = header.hasExceptionClassName() ?
+                header.getExceptionClassName() : 
+                  "ServerDidNotSetExceptionClassName";
+            final String errorMsg = header.hasErrorMsg() ? 
+                header.getErrorMsg() : "ServerDidNotSetErrorMsg" ;
+          RemoteException re = 
+              new RemoteException(exceptionClassName, errorMsg);
+          if (status == RpcStatusProto.ERROR) {
+            call.setException(re);
+            calls.remove(callId);
+          } else if (status == RpcStatusProto.FATAL) {
+            // Close the connection
+            markClosed(re);
+          }
         }
       } catch (IOException e) {
         markClosed(e);

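Net effect of this hunk: on failure, everything needed to build the
RemoteException now arrives inside the delimited response header, instead of
as two WritableUtils strings trailing it. A condensed, hypothetical helper
(not code from this commit) showing the new decode step, using the generated
classes from RpcHeader.proto below:

    // Hypothetical condensation of the failure branch above.
    private RemoteException decodeFailure(RpcResponseHeaderProto header) {
      final String exClass = header.hasExceptionClassName()
          ? header.getExceptionClassName() : "ServerDidNotSetExceptionClassName";
      final String errMsg = header.hasErrorMsg()
          ? header.getErrorMsg() : "ServerDidNotSetErrorMsg";
      return new RemoteException(exClass, errMsg);
      // status ERROR fails just this call; FATAL also closes the connection.
    }
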
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Tue Mar 12 01:58:43 2013
@@ -2001,6 +2001,7 @@ public abstract class Server {
         RpcResponseHeaderProto.newBuilder();
     response.setCallId(call.callId);
     response.setStatus(status);
+    response.setServerIpcVersionNum(Server.CURRENT_VERSION);
 
 
     if (status == RpcStatusProto.SUCCESS) {
@@ -2017,13 +2018,10 @@ public abstract class Server {
             StringUtils.stringifyException(t));
         return;
       }
-    } else {
-      if (status == RpcStatusProto.FATAL) {
-        response.setServerIpcVersionNum(Server.CURRENT_VERSION);
-      }
+    } else { // Rpc Failure
+      response.setExceptionClassName(errorClass);
+      response.setErrorMsg(error);
       response.build().writeDelimitedTo(out);
-      WritableUtils.writeString(out, errorClass);
-      WritableUtils.writeString(out, error);
     }
     if (call.connection.useWrap) {
       wrapWithSasl(responseBuf, call);

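This is the server-side counterpart: the header itself now carries the error
details, and serverIpcVersionNum is set on every response rather than only on
FATAL ones. A condensed sketch of the new assembly (simplified from the hunk
above, not verbatim):

    // One self-contained header replaces header + two trailing UTF-8 strings.
    RpcResponseHeaderProto.Builder response = RpcResponseHeaderProto.newBuilder()
        .setCallId(call.callId)
        .setStatus(status)
        .setServerIpcVersionNum(Server.CURRENT_VERSION);  // now always sent
    if (status != RpcStatusProto.SUCCESS) {               // ERROR or FATAL
      response.setExceptionClassName(errorClass);
      response.setErrorMsg(error);
      response.build().writeDelimitedTo(out);
    }
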
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java Tue Mar 12 01:58:43 2013
@@ -23,11 +23,10 @@ import java.io.IOException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
 
 /**
  * Class that provides utility functions for checking disk problem
@@ -36,10 +35,16 @@ import org.apache.hadoop.fs.permission.F
 @InterfaceStability.Unstable
 public class DiskChecker {
 
+  private static final long SHELL_TIMEOUT = 10 * 1000;
+
   public static class DiskErrorException extends IOException {
     public DiskErrorException(String msg) {
       super(msg);
     }
+
+    public DiskErrorException(String msg, Throwable cause) {
+      super(msg, cause);
+    }
   }
     
   public static class DiskOutOfSpaceException extends IOException {
@@ -85,25 +90,11 @@ public class DiskChecker {
    * @throws DiskErrorException
    */
   public static void checkDir(File dir) throws DiskErrorException {
-    if (!mkdirsWithExistsCheck(dir))
+    if (!mkdirsWithExistsCheck(dir)) {
       throw new DiskErrorException("Can not create directory: "
                                    + dir.toString());
-
-    if (!dir.isDirectory())
-      throw new DiskErrorException("Not a directory: "
-                                   + dir.toString());
-
-    if (!dir.canRead())
-      throw new DiskErrorException("Directory is not readable: "
-                                   + dir.toString());
-
-    if (!dir.canWrite())
-      throw new DiskErrorException("Directory is not writable: "
-                                   + dir.toString());
-
-    if (!dir.canExecute())
-      throw new DiskErrorException("Directory is not executable: "
-	  + dir.toString());
+    }
+    checkDirAccess(dir);
   }
 
   /**
@@ -152,24 +143,102 @@ public class DiskChecker {
                               FsPermission expected)
   throws DiskErrorException, IOException {
     mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
+    checkDirAccess(localFS.pathToFile(dir));
+  }
 
-    FileStatus stat = localFS.getFileStatus(dir);
-    FsPermission actual = stat.getPermission();
+  /**
+   * Checks that the given file is a directory and that the current running
+   * process can read, write, and execute it.
+   * 
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not a directory, not readable, not
+   *   writable, or not executable
+   */
+  private static void checkDirAccess(File dir) throws DiskErrorException {
+    if (!dir.isDirectory()) {
+      throw new DiskErrorException("Not a directory: "
+                                   + dir.toString());
+    }
 
-    if (!stat.isDirectory())
-      throw new DiskErrorException("not a directory: "+ dir.toString());
+    if (Shell.WINDOWS) {
+      checkAccessByFileSystemInteraction(dir);
+    } else {
+      checkAccessByFileMethods(dir);
+    }
+  }
 
-    FsAction user = actual.getUserAction();
-    if (!user.implies(FsAction.READ))
-      throw new DiskErrorException("directory is not readable: "
+  /**
+   * Checks that the current running process can read, write, and execute the
+   * given directory by using methods of the File object.
+   * 
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not readable, not writable, or not
+   *   executable
+   */
+  private static void checkAccessByFileMethods(File dir)
+      throws DiskErrorException {
+    if (!dir.canRead()) {
+      throw new DiskErrorException("Directory is not readable: "
+                                   + dir.toString());
+    }
+
+    if (!dir.canWrite()) {
+      throw new DiskErrorException("Directory is not writable: "
                                    + dir.toString());
+    }
 
-    if (!user.implies(FsAction.WRITE))
-      throw new DiskErrorException("directory is not writable: "
+    if (!dir.canExecute()) {
+      throw new DiskErrorException("Directory is not executable: "
                                    + dir.toString());
+    }
+  }
 
-    if (!user.implies(FsAction.EXECUTE))
-      throw new DiskErrorException("directory is not listable: "
+  /**
+   * Checks that the current running process can read, write, and execute the
+   * given directory by attempting each of those operations on the file system.
+   * This method contains several workarounds to known JVM bugs that cause
+   * File.canRead, File.canWrite, and File.canExecute to return incorrect results
+   * on Windows with NTFS ACLs.  See:
+   * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6203387
+   * These bugs are supposed to be fixed in JDK7.
+   * 
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not readable, not writable, or not
+   *   executable
+   */
+  private static void checkAccessByFileSystemInteraction(File dir)
+      throws DiskErrorException {
+    // Make sure we can read the directory by listing it.
+    if (dir.list() == null) {
+      throw new DiskErrorException("Directory is not readable: "
                                    + dir.toString());
+    }
+
+    // Make sure we can write to the directory by creating a temp file in it.
+    try {
+      File tempFile = File.createTempFile("checkDirAccess", null, dir);
+      if (!tempFile.delete()) {
+        throw new DiskErrorException("Directory is not writable: "
+                                     + dir.toString());
+      }
+    } catch (IOException e) {
+      throw new DiskErrorException("Directory is not writable: "
+                                   + dir.toString(), e);
+    }
+
+    // Make sure the directory is executable by trying to cd into it.  This
+    // launches a separate process.  It does not change the working directory of
+    // the current process.
+    try {
+      String[] cdCmd = new String[] { "cmd", "/C", "cd",
+          dir.getAbsolutePath() };
+      Shell.execCommand(null, cdCmd, SHELL_TIMEOUT);
+    } catch (Shell.ExitCodeException e) {
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString(), e);
+    } catch (IOException e) {
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString(), e);
+    }
   }
 }

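A minimal usage sketch of the reworked check (hypothetical caller;
DiskChecker.checkDir(File) and DiskErrorException are as in the diff above):

    import java.io.File;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class DataDirProbe {
      public static void main(String[] args) {
        try {
          // On Windows this now probes the file system (list, temp file,
          // "cmd /C cd") instead of trusting File.canRead/canWrite/canExecute.
          DiskChecker.checkDir(new File(args[0]));
          System.out.println("directory is usable");
        } catch (DiskErrorException e) {
          System.err.println("bad volume: " + e.getMessage());
        }
      }
    }
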
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/RpcHeader.proto Tue Mar 12 01:58:43 2013
@@ -70,12 +70,11 @@ message RpcRequestHeaderProto { // the h
  * | RpcResponseHeaderProto - serialized delimited ie has len         |
  * +------------------------------------------------------------------+
  * | if request is successful:                                        |
- * |   - RpcResponse -  The actual rpc response  bytes                 |
- * |     This response is serialized based on RpcKindProto             |
+ * |   - RpcResponse -  The actual rpc response  bytes follow         |
+ * |     the response header                                           |
+ * |     This response is serialized based on RpcKindProto            |
  * | if request fails :                                               |
- * |   - length (4 byte int) + Class name of exception - UTF-8 string |
- * |   - length (4 byte int) + Stacktrace - UTF-8 string              |
- * |   if the strings are null then the length is -1                  |
+ * |   The rpc response header contains the necessary info            |
  * +------------------------------------------------------------------+
  *
  */
@@ -88,5 +87,7 @@ message RpcResponseHeaderProto {
 
   required uint32 callId = 1; // callId used in Request
   required RpcStatusProto status = 2;
-  optional uint32 serverIpcVersionNum = 3; // in case of an fatal IPC error 
+  optional uint32 serverIpcVersionNum = 3; // Sent if success or fail
+  optional string exceptionClassName = 4;  // if request fails
+  optional string errorMsg = 5;  // if request fails, often contains stack trace
 }

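Since serverIpcVersionNum is now sent on every response, a client can detect
a version mismatch on any reply, not just a fatal one. A hypothetical check
(not part of this commit):

    // Hypothetical client-side compatibility check.
    static void checkServerVersion(RpcResponseHeaderProto header,
        int clientVersion) throws IOException {
      if (header.hasServerIpcVersionNum()
          && header.getServerIpcVersionNum() != clientVersion) {
        throw new IOException("Server IPC version "
            + header.getServerIpcVersionNum()
            + " does not match client version " + clientVersion);
      }
    }
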
Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1454237-1455388

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java?rev=1455390&r1=1455389&r2=1455390&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java Tue Mar 12 01:58:43 2013
@@ -25,10 +25,13 @@ import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
 
 import static org.apache.hadoop.test.MockitoMaker.*;
-import org.apache.hadoop.fs.permission.FsPermission;
+import static org.apache.hadoop.fs.permission.FsAction.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.Shell;
 
@@ -110,29 +113,21 @@ public class TestDiskChecker {
 
   private void _checkDirs(boolean isDir, FsPermission perm, boolean success)
       throws Throwable {
-    File localDir = make(stub(File.class).returning(true).from.exists());
-    when(localDir.mkdir()).thenReturn(true);
-    Path dir = mock(Path.class);
-    LocalFileSystem fs = make(stub(LocalFileSystem.class)
-        .returning(localDir).from.pathToFile(dir));
-    FileStatus stat = make(stub(FileStatus.class)
-        .returning(perm).from.getPermission());
-    when(stat.isDirectory()).thenReturn(isDir);
-    when(fs.getFileStatus(dir)).thenReturn(stat);
-
+    File localDir = File.createTempFile("test", "tmp");
+    if (isDir) {
+      localDir.delete();
+      localDir.mkdir();
+    }
+    Shell.execCommand(Shell.getSetPermissionCommand(String.format("%04o",
+      perm.toShort()), false, localDir.getAbsolutePath()));
     try {
-      DiskChecker.checkDir(fs, dir, perm);
-
-      verify(stat).isDirectory();
-      verify(fs, times(2)).getFileStatus(dir);
-      verify(stat, times(2)).getPermission();
+      DiskChecker.checkDir(FileSystem.getLocal(new Configuration()),
+        new Path(localDir.getAbsolutePath()), perm);
       assertTrue("checkDir success", success);
-    }
-    catch (DiskErrorException e) {
+    } catch (DiskErrorException e) {
       assertFalse("checkDir success", success);
-      e.printStackTrace();
     }
-    System.out.println("checkDir success: "+ success);
+    localDir.delete();
   }
 
   /**
@@ -168,8 +163,10 @@ public class TestDiskChecker {
   private void _checkDirs(boolean isDir, String perm, boolean success)
       throws Throwable {
     File localDir = File.createTempFile("test", "tmp");
-    localDir.delete();
-    localDir.mkdir();
+    if (isDir) {
+      localDir.delete();
+      localDir.mkdir();
+    }
     Shell.execCommand(Shell.getSetPermissionCommand(perm, false,
                                                     localDir.getAbsolutePath()));
     try {