Posted to hdfs-commits@hadoop.apache.org by ma...@apache.org on 2011/06/01 20:45:21 UTC

svn commit: r1130262 - in /hadoop/hdfs/trunk: CHANGES.txt src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java

Author: mattf
Date: Wed Jun  1 18:45:21 2011
New Revision: 1130262

URL: http://svn.apache.org/viewvc?rev=1130262&view=rev
Log:
HDFS-1934. Fix NullPointerException when certain File APIs return null. Contributed by Bharath Mundlapudi.
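
For context: java.io.File#listFiles() returns null rather than throwing when the
path is not a directory or an underlying I/O error occurs, so callers that sort or
iterate the result without a null check fail with a NullPointerException. The patch
below switches those call sites to org.apache.hadoop.fs.FileUtil.listFiles(File),
which turns the null into an IOException. A minimal sketch of that wrapper's
behavior, assuming (not quoting) the Hadoop implementation:

import java.io.File;
import java.io.IOException;

final class FileUtilSketch {
  /** Sketch: list a directory, converting listFiles()'s null return into an IOException. */
  static File[] listFiles(File dir) throws IOException {
    File[] files = dir.listFiles();
    if (files == null) {
      // null means dir is not a directory, or the listing hit an I/O error
      throw new IOException("Invalid directory or I/O error occurred for dir: " + dir);
    }
    return files;
  }
}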

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=1130262&r1=1130261&r2=1130262&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Wed Jun  1 18:45:21 2011
@@ -287,6 +287,9 @@ Trunk (unreleased changes)
 
   IMPROVEMENTS
 
+    HDFS-1934. Fix NullPointerException when certain File APIs return null
+    (Bharath Mundlapudi via mattf)
+
     HDFS-1510. Added test-patch.properties required by test-patch.sh (nigel)
 
     HDFS-1628. Display full path in AccessControlException.  (John George

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java?rev=1130262&r1=1130261&r2=1130262&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/DirectoryScanner.java Wed Jun  1 18:45:21 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -38,11 +39,13 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.datanode.FSDataset.FSVolume;
 import org.apache.hadoop.util.Daemon;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Periodically scans the data directories for block and block metadata files.
@@ -480,9 +483,15 @@ public class DirectoryScanner implements
     /** Compile list {@link ScanInfo} for the blocks in the directory <dir> */
     private LinkedList<ScanInfo> compileReport(FSVolume vol, File dir,
         LinkedList<ScanInfo> report) {
-      File[] files = dir.listFiles();
+      File[] files;
+      try {
+        files = FileUtil.listFiles(dir);
+      } catch (IOException ioe) {
+        LOG.warn("Exception occurred while compiling report: ", ioe);
+        // Ignore this directory and proceed.
+        return report;
+      }
       Arrays.sort(files);
-
       /*
        * Assumption: In the sorted list of files block file appears immediately
        * before block metadata file. This is true for the current naming
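
The change above fixes a real crash mode in the directory scanner: if a data
directory vanishes between scans (or listing it hits a transient I/O error),
dir.listFiles() returns null and the old Arrays.sort(files) call threw a
NullPointerException. A runnable illustration of the failure, with a hypothetical
path:

import java.io.File;
import java.util.Arrays;

public class ListFilesNpeDemo {
  public static void main(String[] args) {
    File dir = new File("/data/dfs/does-not-exist");  // hypothetical path
    File[] files = dir.listFiles();  // null: path is missing or unreadable
    Arrays.sort(files);              // throws NullPointerException
  }
}

With FileUtil.listFiles() the same condition becomes an IOException, which
compileReport() logs and swallows, skipping that directory and returning the
report built so far.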

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java?rev=1130262&r1=1130261&r2=1130262&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/datanode/FSDataset.java Wed Jun  1 18:45:21 2011
@@ -99,7 +99,7 @@ public class FSDataset implements FSCons
                                 dir.toString());
         }
       } else {
-        File[] files = dir.listFiles();
+        File[] files = FileUtil.listFiles(dir); 
         int numChildren = 0;
         for (int idx = 0; idx < files.length; idx++) {
           if (files[idx].isDirectory()) {
@@ -187,7 +187,7 @@ public class FSDataset implements FSCons
      * original file name; otherwise the tmp file is deleted.
      */
     private void recoverTempUnlinkedBlock() throws IOException {
-      File files[] = dir.listFiles();
+      File files[] = FileUtil.listFiles(dir);
       for (File file : files) {
         if (!FSDataset.isUnlinkTmpFile(file)) {
           continue;
@@ -420,9 +420,9 @@ public class FSDataset implements FSCons
      * @param isFinalized true if the directory has finalized replicas;
      *                    false if the directory has rbw replicas
      */
-    private void addToReplicasMap(ReplicasMap volumeMap, 
-        File dir, boolean isFinalized) {
-      File blockFiles[] = dir.listFiles();
+    private void addToReplicasMap(ReplicasMap volumeMap, File dir,
+        boolean isFinalized) throws IOException {
+      File blockFiles[] = FileUtil.listFiles(dir);
       for (File blockFile : blockFiles) {
         if (!Block.isBlockFilename(blockFile))
           continue;
@@ -756,7 +756,7 @@ public class FSDataset implements FSCons
           throw new IOException("Failed to delete " + finalizedDir);
         }
         FileUtil.fullyDelete(tmpDir);
-        for (File f : bpCurrentDir.listFiles()) {
+        for (File f : FileUtil.listFiles(bpCurrentDir)) {
           if (!f.delete()) {
             throw new IOException("Failed to delete " + f);
           }
@@ -764,7 +764,7 @@ public class FSDataset implements FSCons
         if (!bpCurrentDir.delete()) {
           throw new IOException("Failed to delete " + bpCurrentDir);
         }
-        for (File f : bpDir.listFiles()) {
+        for (File f : FileUtil.listFiles(bpDir)) {
           if (!f.delete()) {
             throw new IOException("Failed to delete " + f);
           }
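
Note the signature change above: addToReplicasMap() now declares throws
IOException, so an unreadable block directory surfaces as an explicit failure
during volume scanning instead of a NullPointerException deep in the loop.
Callers have to handle or propagate it; a hedged sketch of the caller-side
pattern (method and variable names here are illustrative, not the actual
FSDataset call site):

// Sketch only: how a caller adapts to the new throws clause.
private void scanVolume(ReplicasMap volumeMap, File finalizedDir) throws IOException {
  try {
    addToReplicasMap(volumeMap, finalizedDir, true);
  } catch (IOException ioe) {
    LOG.warn("Could not read replica directory " + finalizedDir, ioe);
    throw ioe;  // fail fast: the volume is unusable, not silently empty
  }
}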