Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/04/26 23:33:45 UTC

svn commit: r532873 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/dfs/DataNode.java src/java/org/apache/hadoop/dfs/FSDataset.java

Author: cutting
Date: Thu Apr 26 14:33:44 2007
New Revision: 532873

URL: http://svn.apache.org/viewvc?view=rev&rev=532873
Log:
HADOOP-1297.  Fix the datanode so that requests to remove blocks that do not exist no longer cause block reports to be re-sent every second.  Contributed by Dhruba.

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=532873&r1=532872&r2=532873
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Thu Apr 26 14:33:44 2007
@@ -273,6 +273,10 @@
 81. HADOOP-1293.  Fix contrib/streaming to print more than the first
     twenty lines of standard error.  (Koji Noguchi via cutting)
 
+82. HADOOP-1297.  Fix the datanode so that requests to remove blocks
+    that do not exist no longer cause block reports to be re-sent
+    every second.  (Dhruba Borthakur via cutting)
+
 
 Release 0.12.3 - 2007-04-06
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java?view=diff&rev=532873&r1=532872&r2=532873
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DataNode.java Thu Apr 26 14:33:44 2007
@@ -477,8 +477,8 @@
           //
           DatanodeCommand cmd = namenode.blockReport(dnRegistration,
                                                      data.getBlockReport());
-          processCommand(cmd);
           lastBlockReport = now;
+          processCommand(cmd);
         }
             
         //
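
Why the two-line swap above matters: under the old ordering, processCommand(cmd) could throw an IOException (for instance, when the NameNode's reply asked the datanode to invalidate a block it no longer had), so lastBlockReport was never advanced and the datanode rebuilt and re-sent the full block report on every one-second pass through the loop. A self-contained sketch of the corrected ordering (the interface names and the interval constant are stand-ins for illustration, not the actual Hadoop API):

    import java.io.IOException;

    /**
     * Sketch of the reordering in DataNode.offerService().  Only the two
     * reordered statements mirror the actual change; everything else here
     * is a stand-in.
     */
    public class BlockReportLoopSketch {
      interface DatanodeCommand { void apply() throws IOException; }
      interface NameNode { DatanodeCommand blockReport() throws IOException; }

      private long lastBlockReport = 0;
      private static final long BLOCK_REPORT_INTERVAL = 60 * 60 * 1000L; // assumed

      void maybeSendBlockReport(NameNode namenode, long now) throws IOException {
        if (now - lastBlockReport > BLOCK_REPORT_INTERVAL) {
          DatanodeCommand cmd = namenode.blockReport();
          // Record that the report was sent BEFORE processing the reply.
          // If cmd.apply() throws, the caller logs and re-enters the loop,
          // but lastBlockReport has already advanced, so the next pass does
          // not immediately rebuild and re-send the entire block report.
          lastBlockReport = now;
          cmd.apply();
        }
      }
    }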

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java?view=diff&rev=532873&r1=532872&r2=532873
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSDataset.java Thu Apr 26 14:33:44 2007
@@ -587,36 +587,57 @@
    * just get rid of it.
    */
   public void invalidate(Block invalidBlks[]) throws IOException {
+    boolean error = false;
     for (int i = 0; i < invalidBlks.length; i++) {
-      File f;
+      File f = null;
       synchronized (this) {
         f = getFile(invalidBlks[i]);
+        FSVolume v = volumeMap.get(invalidBlks[i]);
         if (f == null) {
-          throw new IOException("Unexpected error trying to delete block "
-                                + invalidBlks[i] + 
-                                ". Block not found in blockMap.");
+          DataNode.LOG.warn("Unexpected error trying to delete block "
+                            + invalidBlks[i] + 
+                            ". Block not found in blockMap." +
+                            ((v == null) ? " " : " Block found in volumeMap."));
+          error = true;
+          continue;
         }
-        FSVolume v = volumeMap.get(invalidBlks[i]);
         if (v == null) {
-          throw new IOException("Unexpected error trying to delete block "
-                                + invalidBlks[i] + 
-                                ". No volume for this block.");
+          DataNode.LOG.warn("Unexpected error trying to delete block "
+                            + invalidBlks[i] + 
+                            ". No volume for this block." +
+                            " Block found in blockMap. " + f + ".");
+          error = true;
+          continue;
         }
         File parent = f.getParentFile();
         if (parent == null) {
-          throw new IOException("Unexpected error trying to delete block "
-                                + invalidBlks[i] + 
-                                ". Parent not found for file " + f + ".");
+          DataNode.LOG.warn("Unexpected error trying to delete block "
+                            + invalidBlks[i] + 
+                            ". Parent not found for file " + f + ".");
+          error = true;
+          continue;
         }
         v.clearPath(parent);
         blockMap.remove(invalidBlks[i]);
         volumeMap.remove(invalidBlks[i]);
       }
       if (!f.delete()) {
-        throw new IOException("Unexpected error trying to delete block "
-                              + invalidBlks[i] + " at file " + f);
+        DataNode.LOG.warn("Unexpected error trying to delete block "
+                          + invalidBlks[i] + " at file " + f);
+        error = true;
+        continue;
+      }
+      DataNode.LOG.info("Deleting block " + invalidBlks[i] + " file " + f);
+      if (f.exists()) {
+        //
+        // This is a temporary check especially for hadoop-1220. 
+        // This will go away in the future.
+        //
+        DataNode.LOG.info("File " + f + " was deleted but still exists!");
       }
-      DataNode.LOG.info("Deleting block " + invalidBlks[i]);
+    }
+    if (error) {
+      throw new IOException("Error in deleting blocks.");
     }
   }
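
The FSDataset change applies the same idea from the other side: instead of throwing on the first block that cannot be deleted, which aborted the rest of the batch and made the block-report command fail, invalidate() now logs a warning for each failure, continues with the remaining blocks, and throws a single IOException at the end. A generic, self-contained sketch of that warn-and-continue pattern (class and method names here are illustrative, not from the Hadoop source):

    import java.io.File;
    import java.io.IOException;

    public class BatchDeleteSketch {
      public static void deleteAll(File[] files) throws IOException {
        boolean error = false;
        for (File f : files) {
          if (!f.delete()) {
            // Stand-in for DataNode.LOG.warn(...): report the failure but
            // keep going so one bad entry cannot abort the whole batch.
            System.err.println("WARN: unexpected error deleting " + f);
            error = true;
            continue;
          }
          System.err.println("INFO: deleted " + f); // stand-in for LOG.info
        }
        if (error) {
          // Signal overall failure once, after every entry has been tried.
          throw new IOException("Error in deleting blocks.");
        }
      }
    }

Deferring the throw keeps the caller's error signal intact while guaranteeing that a single stale entry in blockMap can no longer prevent deletion of every other replica named in the same request.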