Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2015/04/17 07:14:01 UTC

hadoop git commit: HDFS-8153. Error Message points to wrong parent directory in case of path component name length error. Contributed by Anu Engineer.

Repository: hadoop
Updated Branches:
  refs/heads/trunk bb6dde68f -> 369ddc67b


HDFS-8153. Error Message points to wrong parent directory in case of path component name length error. Contributed by Anu Engineer.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/369ddc67
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/369ddc67
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/369ddc67

Branch: refs/heads/trunk
Commit: 369ddc67bdaf61cca3f2f766ab504e2932f6fb72
Parents: bb6dde6
Author: Jitendra Pandey <ji...@apache.org>
Authored: Thu Apr 16 22:13:09 2015 -0700
Committer: Jitendra Pandey <ji...@apache.org>
Committed: Thu Apr 16 22:13:09 2015 -0700

----------------------------------------------------------------------
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 +
 .../hdfs/server/namenode/FSDirectory.java       |  2 +-
 .../hdfs/server/namenode/TestFsLimits.java      | 83 +++++++++++++++++++-
 3 files changed, 85 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
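
In short: when a path component exceeds the limit configured via DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, the PathComponentTooLongException raised by the NameNode named the grandparent of the offending component rather than its immediate parent. The following is a minimal stand-alone sketch of that off-by-one; the parentOf() helper is invented for illustration and is not part of the NameNode code base.

    // Stand-alone illustration of the off-by-one fixed here; parentOf() is a
    // made-up helper for this sketch, not actual HDFS code.
    public class ParentPathDemo {

      // Joins the components strictly before childIndex, i.e. the parent
      // directory of the component at childIndex.  components[0] is the empty
      // root component produced by String.split("/") on an absolute path.
      static String parentOf(String[] components, int childIndex) {
        StringBuilder sb = new StringBuilder();
        for (int i = 1; i < childIndex; i++) {
          sb.append('/').append(components[i]);
        }
        return sb.length() == 0 ? "/" : sb.toString();
      }

      public static void main(String[] args) {
        String newPath = "/user/testHome/FileNameLength/really_big_name_0003_fail";
        String[] components = newPath.split("/");
        int last = components.length - 1;     // index of the over-long component

        // What the old error message effectively reported: one level too high.
        System.out.println(parentOf(components, last - 1)); // /user/testHome
        // What the user expects: the directory the component was created in.
        System.out.println(parentOf(components, last));     // /user/testHome/FileNameLength
      }
    }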


http://git-wip-us.apache.org/repos/asf/hadoop/blob/369ddc67/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index e977e6a..b980f93 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -543,6 +543,9 @@ Release 2.7.1 - UNRELEASED
     HDFS-8149. The footer of the Web UI "Hadoop, 2014" is old.
     (Brahma Reddy Battula via aajisaka)
 
+    HDFS-8153. Error Message points to wrong parent directory in case of
+    path component name length error (Anu Engineer via jitendra)
+
 Release 2.7.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/369ddc67/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 966cf3a..f74c42a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -972,7 +972,7 @@ public class FSDirectory implements Closeable {
     // original location because a quota violation would cause the item
     // to go "poof".  The fs limits must be bypassed for the same reason.
     if (checkQuota) {
-      final String parentPath = existing.getPath(pos - 1);
+      final String parentPath = existing.getPath();
       verifyMaxComponentLength(inode.getLocalNameBytes(), parentPath);
       verifyMaxDirItems(parent, parentPath);
     }
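
A reading of this one-line change (based on this hunk alone, not the full INodesInPath class): existing holds the ancestor inodes that already exist, so existing.getPath() is the full path of the immediate parent of the inode being added, whereas the old existing.getPath(pos - 1) rebuilt the path from one component too few and therefore passed the grandparent to verifyMaxComponentLength() and verifyMaxDirItems(), as in the sketch above.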

http://git-wip-us.apache.org/repos/asf/hadoop/blob/369ddc67/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
index 945972d..d6c5183 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsLimits.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
 
@@ -173,29 +174,107 @@ public class TestFsLimits {
         HadoopIllegalArgumentException.class);
   }
 
-  private void mkdirs(String name, Class<?> expected)
+  /**
+   * This test verifies that the error string contains the
+   * right parent directory name if the operation fails with
+   * PathComponentTooLongException.
+   */
+  @Test
+  public void testParentDirectoryNameIsCorrect() throws Exception {
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 20);
+    mkdirs("/user", null);
+    mkdirs("/user/testHome", null);
+    mkdirs("/user/testHome/FileNameLength", null);
+
+    mkdirCheckParentDirectory(
+      "/user/testHome/FileNameLength/really_big_name_0003_fail",
+      "/user/testHome/FileNameLength", PathComponentTooLongException.class);
+
+    renameCheckParentDirectory("/user/testHome/FileNameLength",
+      "/user/testHome/really_big_name_0003_fail", "/user/testHome/",
+      PathComponentTooLongException.class);
+  }
+
+  /**
+   * Verifies that the parent directory is correct after a failed call to mkdir.
+   * @param name Directory Name
+   * @param ParentDirName Expected Parent Directory
+   * @param expected Exception that is expected
+   * @throws Exception
+   */
+  private void mkdirCheckParentDirectory(String name, String ParentDirName,
+                                         Class<?> expected)
+    throws Exception {
+    verify(mkdirs(name, expected), ParentDirName);
+  }
+
+  /**
+   * Verifies that the parent directory is correct after a failed call to rename.
+   * @param name Directory Name
+   * @param dst Destination Name
+   * @param ParentDirName Expected Parent Directory
+   * @param expected Exception that is expected
+   * @throws Exception
+   */
+  private void renameCheckParentDirectory(String name, String dst,
+                                          String ParentDirName,
+                                          Class<?> expected)
+    throws Exception {
+    verify(rename(name, dst, expected), ParentDirName);
+  }
+
+  /**
+   * Verifies that the parent directory name is present in the given message.
+   * @param message - exception message
+   * @param ParentDirName - parent directory name to look for.
+   */
+  private void verify(String message, String ParentDirName) {
+    boolean found = false;
+    if (message != null) {
+      String[] tokens = message.split("\\s+");
+      for (String token : tokens) {
+        if (token != null && token.equals(ParentDirName)) {
+          found = true;
+          break;
+        }
+      }
+    }
+    assertTrue(found);
+  }
+
+  private String mkdirs(String name, Class<?> expected)
   throws Exception {
     lazyInitFSDirectory();
     Class<?> generated = null;
+    String errorString = null;
     try {
       fs.mkdirs(name, perms, false);
     } catch (Throwable e) {
       generated = e.getClass();
       e.printStackTrace();
+      errorString = e.getMessage();
     }
     assertEquals(expected, generated);
+    return errorString;
   }
 
-  private void rename(String src, String dst, Class<?> expected)
+  private String rename(String src, String dst, Class<?> expected)
       throws Exception {
     lazyInitFSDirectory();
     Class<?> generated = null;
+    String errorString = null;
     try {
       fs.renameTo(src, dst, false, new Rename[] { });
     } catch (Throwable e) {
       generated = e.getClass();
+      errorString = e.getMessage();
     }
     assertEquals(expected, generated);
+    return errorString;
   }
 
   @SuppressWarnings("deprecation")
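
Note on the new test: the verify() helper splits the exception message on whitespace and asserts that the expected parent directory appears as a standalone token, so the check relies on the message containing the parent path as its own whitespace-delimited word. To run just this test from the hadoop-hdfs module, the usual Maven invocation should work (assuming a standard build environment): mvn test -Dtest=TestFsLimits from hadoop-hdfs-project/hadoop-hdfs.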