You are viewing a plain text version of this content. The hyperlink to the canonical version was lost in the plain-text conversion; the original message is available in the Apache mailing list archive.
Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2007/09/19 19:23:19 UTC

svn commit: r577390 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/dfs/FSConstants.java src/java/org/apache/hadoop/dfs/FSImage.java src/java/org/apache/hadoop/dfs/FSNamesystem.java src/webapps/datanode/browseDirectory.jsp

Author: dhruba
Date: Wed Sep 19 10:23:13 2007
New Revision: 577390

URL: http://svn.apache.org/viewvc?rev=577390&view=rev
Log:
HADOOP-1887. The Namenode encounters an ArrayIndexOutOfBoundsException
while listing a directory that had a file that was
being actively written to.  (Dhruba Borthakur via dhruba)


Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSConstants.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
    lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=577390&r1=577389&r2=577390&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Sep 19 10:23:13 2007
@@ -83,6 +83,10 @@
 
   BUG FIXES
 
+    HADOOP-1887. The Namenode encounters an ArrayIndexOutOfBoundsException
+    while listing a directory that had a file that was
+    being actively written to.  (Dhruba Borthakur via dhruba)
+
     HADOOP-1904. The Namenode encounters an exception because the
     list of blocks per datanode-descriptor was corrupted.
     (Konstantin Shvachko via dhruba)

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSConstants.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSConstants.java?rev=577390&r1=577389&r2=577390&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSConstants.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSConstants.java Wed Sep 19 10:23:13 2007
@@ -159,7 +159,7 @@
   // Version is reflected in the data storage file.
   // Versions are negative.
   // Decrement LAYOUT_VERSION to define a new version.
-  public static final int LAYOUT_VERSION = -9;
+  public static final int LAYOUT_VERSION = -10;
   // Current version: 
-  // files with one block uses the default blocksize
+  // a directory has a block list length of -1
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java?rev=577390&r1=577389&r2=577390&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java Wed Sep 19 10:23:13 2007
@@ -685,7 +685,11 @@
         }
         int numBlocks = in.readInt();
         Block blocks[] = null;
-        if (numBlocks > 0) {
+
+        // for older versions, a blocklist of size 0
+        // indicates a directory.
+        if ((-9 <= imgVersion && numBlocks > 0) ||
+            (imgVersion < -9 && numBlocks >= 0)) {
           blocks = new Block[numBlocks];
           for (int j = 0; j < numBlocks; j++) {
             blocks[j] = new Block();
@@ -842,7 +846,7 @@
       out.writeShort(0);  // replication
       out.writeLong(inode.getModificationTime());
       out.writeLong(0);   // preferred block size
-      out.writeInt(0);    // # of blocks
+      out.writeInt(-1);    // # of blocks
     }
     for(INode child : ((INodeDirectory)inode).getChildren()) {
       saveImage(fullName, child, out);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?rev=577390&r1=577389&r2=577390&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Wed Sep 19 10:23:13 2007
@@ -599,7 +599,9 @@
     if (blocks == null) {
       return null;
     }
-    assert blocks.length > 0 : "Array of blocks is empty.";
+    if (blocks.length == 0) {
+      return new LocatedBlocks(inode, new ArrayList<LocatedBlock>(blocks.length));
+    }
     List<LocatedBlock> results;
     results = new ArrayList<LocatedBlock>(blocks.length);
 

Modified: lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp?rev=577390&r1=577389&r2=577390&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp Wed Sep 19 10:23:13 2007
@@ -41,9 +41,13 @@
         List<LocatedBlock> blocks = 
           dfs.namenode.getBlockLocations(dir, 0, 1).getLocatedBlocks();
 	      
-        LocatedBlock firstBlock = blocks.get(0);
-        DatanodeInfo [] locations = firstBlock.getLocations();
-        if (locations.length == 0) {
+        LocatedBlock firstBlock = null;
+        DatanodeInfo [] locations = null;
+        if (blocks.size() > 0) {
+          firstBlock = blocks.get(0);
+          locations = firstBlock.getLocations();
+        }
+        if (locations == null || locations.length == 0) {
           out.print("Empty file");
         } else {
           DatanodeInfo chosenNode = jspHelper.bestNode(firstBlock);
@@ -100,8 +104,11 @@
           if (!files[i].isDir()) {
             List<LocatedBlock> blocks = 
               dfs.namenode.getBlockLocations(files[i].getPath().toString(), 0, 1).getLocatedBlocks();
-            DatanodeInfo [] locations = blocks.get(0).getLocations();
-            if (locations.length == 0) {
+            DatanodeInfo [] locations = null;
+            if (blocks.size() != 0) {
+              locations = blocks.get(0).getLocations();
+            }
+            if (locations == null || locations.length == 0) {
               cols[0] = files[i].getName();
             } else {
               String datanodeUrl = req.getRequestURL()+"?dir="+