Posted to common-commits@hadoop.apache.org by dh...@apache.org on 2007/09/28 21:14:59 UTC

svn commit: r580461 - in /lucene/hadoop/trunk: CHANGES.txt src/java/org/apache/hadoop/dfs/FSNamesystem.java src/java/org/apache/hadoop/util/HostsFileReader.java

Author: dhruba
Date: Fri Sep 28 12:14:58 2007
New Revision: 580461

URL: http://svn.apache.org/viewvc?rev=580461&view=rev
Log:
Nodes listed in the include and exclude files always
appear in the datanode report.
(Raghu Angadi via dhruba)


Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/HostsFileReader.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=580461&r1=580460&r2=580461&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri Sep 28 12:14:58 2007
@@ -94,6 +94,10 @@
 
   BUG FIXES
 
+    HADOOP-1933. Nodes listed in the include and exclude files always
+    appear in the datanode report.
+    (Raghu Angadi via dhruba)
+
     HADOOP-1953. The job tracker should wait between calls to try and delete 
     the system directory (Owen O'Malley via devaraj)
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?rev=580461&r1=580460&r2=580461&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Fri Sep 28 12:14:58 2007
@@ -2474,43 +2474,83 @@
     }
   }
 
-  public synchronized DatanodeInfo[] datanodeReport( DatanodeReportType type ) {
-    ArrayList<DatanodeInfo> results = new ArrayList<DatanodeInfo>();
+  private synchronized ArrayList<DatanodeDescriptor> getDatanodeListForReport(
+                                                      DatanodeReportType type) {                  
+    
+    boolean listLiveNodes = type == DatanodeReportType.ALL ||
+                            type == DatanodeReportType.LIVE;
+    boolean listDeadNodes = type == DatanodeReportType.ALL ||
+                            type == DatanodeReportType.DEAD;
+
+    HashMap<String, String> mustList = new HashMap<String, String>();
+    
+    if (listDeadNodes) {
+      // First load all the nodes listed in the include and exclude files.
+      for (Iterator<String> it = hostsReader.getHosts().iterator(); 
+           it.hasNext();) {
+        mustList.put(it.next(), "");
+      }
+      for (Iterator<String> it = hostsReader.getExcludedHosts().iterator(); 
+           it.hasNext();) {
+        mustList.put(it.next(), "");
+      }
+    }
+   
+    ArrayList<DatanodeDescriptor> nodes = null;
+    
     synchronized (datanodeMap) {
-      for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); it.hasNext();) {
-        DatanodeDescriptor tmp = it.next();
-        switch (type) {
-        case ALL: 
-          results.add(new DatanodeInfo(tmp));
-          break;
-        case DEAD: 
-          if(isDatanodeDead(tmp)) {
-            results.add(new DatanodeInfo(tmp));
-          }
-          break;
-        case LIVE:
-          if(!isDatanodeDead(tmp)) {
-            results.add(new DatanodeInfo(tmp));
-          }
-          break;
+      nodes = new ArrayList<DatanodeDescriptor>(datanodeMap.size() + 
+                                                mustList.size());
+      
+      for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); 
+                                                               it.hasNext();) {
+        DatanodeDescriptor dn = it.next();
+        boolean isDead = isDatanodeDead(dn);
+        if ( (isDead && listDeadNodes) || (!isDead && listLiveNodes) ) {
+          nodes.add(dn);
         }
+        // Remove any form of this datanode from the include/exclude lists.
+        mustList.remove(dn.getName());
+        mustList.remove(dn.getHost());
+        mustList.remove(dn.getHostName());
+      }
+    }
+    
+    if (listDeadNodes) {
+      for (Iterator<String> it = mustList.keySet().iterator(); it.hasNext();) {
+        DatanodeDescriptor dn = 
+            new DatanodeDescriptor(new DatanodeID(it.next(), "", 0));
+        dn.setLastUpdate(0);
+        nodes.add(dn);
       }
     }
-    return results.toArray(new DatanodeInfo[results.size()]);
+    
+    return nodes;
+  }
+
+  public synchronized DatanodeInfo[] datanodeReport( DatanodeReportType type ) {
+
+    ArrayList<DatanodeDescriptor> results = getDatanodeListForReport(type);
+    DatanodeInfo[] arr = new DatanodeInfo[results.size()];
+    for (int i=0; i<arr.length; i++) {
+      arr[i] = new DatanodeInfo(results.get(i));
+    }
+    return arr;
   }
     
   /**
    */
   public synchronized void DFSNodesStatus(ArrayList<DatanodeDescriptor> live, 
                                           ArrayList<DatanodeDescriptor> dead) {
-    synchronized (datanodeMap) {
-      for(Iterator<DatanodeDescriptor> it = datanodeMap.values().iterator(); it.hasNext();) {
-        DatanodeDescriptor node = it.next();
-        if (isDatanodeDead(node))
-          dead.add(node);
-        else
-          live.add(node);
-      }
+
+    ArrayList<DatanodeDescriptor> results = 
+                            getDatanodeListForReport(DatanodeReportType.ALL);    
+    for(Iterator<DatanodeDescriptor> it = results.iterator(); it.hasNext();) {
+      DatanodeDescriptor node = it.next();
+      if (isDatanodeDead(node))
+        dead.add(node);
+      else
+        live.add(node);
     }
   }
 

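For readers skimming the diff: the heart of the new getDatanodeListForReport() is a "must list". When dead nodes are requested, every host named in the include and exclude files is seeded into a map; hosts that match a registered datanode are removed while walking datanodeMap, and whatever remains is synthesized as a dead DatanodeDescriptor with lastUpdate set to 0. The standalone sketch below (hypothetical host names, plain collections, no Hadoop dependencies) illustrates the idea:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class MustListSketch {
  public static void main(String[] args) {
    // Hosts named in the include and exclude files (sample data).
    Set<String> mustList = new HashSet<String>();
    mustList.addAll(Arrays.asList("dn1", "dn2", "dn3"));  // include file
    mustList.addAll(Arrays.asList("dn4"));                // exclude file

    // Hosts that actually registered with the namenode (sample data).
    Set<String> registered = new HashSet<String>(Arrays.asList("dn1", "dn2"));

    // Registered nodes are reported with their real live/dead status;
    // drop each one from the must list, as the patch does with
    // mustList.remove(dn.getName()) and its host-name variants.
    for (String host : registered) {
      System.out.println(host + ": reported from datanodeMap");
      mustList.remove(host);
    }

    // Whatever is left never registered at all: report it as dead,
    // as the patch does by constructing a DatanodeDescriptor whose
    // lastUpdate is 0.
    for (String host : mustList) {
      System.out.println(host + ": synthesized as dead (never registered)");
    }
  }
}
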
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/HostsFileReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/HostsFileReader.java?rev=580461&r1=580460&r2=580461&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/HostsFileReader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/HostsFileReader.java Fri Sep 28 12:14:58 2007
@@ -49,7 +49,9 @@
         String[] nodes = line.split("[ \t\n\f\r]+");
         if (nodes != null) {
           for (int i = 0; i < nodes.length; i++) {
-            set.add(nodes[i]);  // might need to add canonical name
+            if (!nodes[i].equals("")) {
+              set.add(nodes[i]);  // might need to add canonical name
+            }
           }
         }
       }
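
Why the new empty-string check matters: String.split() keeps leading empty tokens, so a line that begins with whitespace (or a blank line) would put "" into the host set. Combined with the FSNamesystem change above, that phantom host would then show up in the datanode report as a dead node. A minimal demonstration of the split() behavior, in plain Java with no Hadoop dependencies:

import java.util.Arrays;

public class SplitDemo {
  public static void main(String[] args) {
    // A line with leading whitespace yields a leading empty token.
    String[] a = "  dn1 dn2".split("[ \t\n\f\r]+");
    System.out.println(a.length + " tokens: " + Arrays.toString(a));
    // prints: 3 tokens: [, dn1, dn2]

    // An empty line yields one empty token, not a zero-length array.
    String[] b = "".split("[ \t\n\f\r]+");
    System.out.println(b.length + " token: \"" + b[0] + "\"");
    // prints: 1 token: ""
  }
}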