Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/01/16 20:27:12 UTC

svn commit: r496827 - in /lucene/hadoop/trunk: ./ src/java/org/apache/hadoop/dfs/ src/test/org/apache/hadoop/dfs/ src/webapps/datanode/ src/webapps/dfs/

Author: cutting
Date: Tue Jan 16 11:27:10 2007
New Revision: 496827

URL: http://svn.apache.org/viewvc?view=rev&rev=496827
Log:
HADOOP-842.  Change HDFS protocol so that the open() method is passed the client host name.
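
For orientation, the caller-visible change is a single extra argument
(method signatures taken from the diffs below; the hostname and path
values are only illustrative):

    // Before this commit (ClientProtocol versionID = 4):
    LocatedBlock[] blocks = namenode.open("/user/foo/data");

    // After this commit (ClientProtocol versionID = 5):
    LocatedBlock[] blocks = namenode.open("client01.example.com",
                                          "/user/foo/data");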

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestRestartDFS.java
    lucene/hadoop/trunk/src/webapps/datanode/browseBlock.jsp
    lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp
    lucene/hadoop/trunk/src/webapps/datanode/tail.jsp
    lucene/hadoop/trunk/src/webapps/dfs/browseDirectory.jsp

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Jan 16 11:27:10 2007
@@ -6,6 +6,11 @@
  1. HADOOP-781.  Remove methods deprecated in 0.10 that are no longer
     widely used.  (cutting)
 
+ 2. HADOOP-842.  Change HDFS protocol so that the open() method is
+    passed the client hostname, to permit the namenode to order block
+    locations on the basis of network topology.
+    (Hairong Kuang via cutting)
+
 
 Release 0.10.1 - 2007-01-10
 
@@ -30,7 +35,7 @@
     (Arun C Murthy via cutting)
 
  8. HADOOP-868.  Decrease the number of open files during map,
-    respecting io.sort.fa ctor.  (Devaraj Das via cutting)
+    respecting io.sort.factor.  (Devaraj Das via cutting)
 
  9. HADOOP-865.  Fix S3 FileSystem so that partially created files can
     be deleted.  (Tom White via cutting)
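
The new entry explains the point of the extra parameter: topology-aware
ordering of block locations.  Nothing in this commit's hunks implements
the ordering itself; clientMachine is only threaded through to the
namenode.  A hedged sketch of what the namenode could later do with it
(distanceTo() is a hypothetical helper, not an API in this tree):

    import java.util.Arrays;
    import java.util.Comparator;

    // Sketch: order each block's datanodes so the ones "closest" to
    // the requesting client come first.
    void sortByDistance(DatanodeInfo[] nodes, final String clientMachine) {
        Arrays.sort(nodes, new Comparator() {
            public int compare(Object a, Object b) {
                // distanceTo() is hypothetical: e.g. 0 = same host,
                // 1 = same rack, 2 = off-rack.
                return distanceTo((DatanodeInfo) a, clientMachine)
                     - distanceTo((DatanodeInfo) b, clientMachine);
            }
        });
    }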

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/ClientProtocol.java Tue Jan 16 11:27:10 2007
@@ -29,19 +29,22 @@
  **********************************************************************/
 interface ClientProtocol extends VersionedProtocol {
 
-  public static final long versionID = 4L; // decommission node added
+  public static final long versionID = 5L; // open() takes a new parameter
   
     ///////////////////////////////////////
     // File contents
     ///////////////////////////////////////
     /**
      * Open an existing file, at the given name.  Returns block 
-     * and DataNode info.  The client will then have to contact
+     * and DataNode info.  DataNodes for each block are sorted by
+     * the distance to the clientMachine, which contains the host name.
+     * The client will then have to contact
      * each indicated DataNode to obtain the actual data.  There
      * is no need to call close() or any other function after
      * calling open().
      */
-    public LocatedBlock[] open(String src) throws IOException;
+    public LocatedBlock[] open( String clientMachine,
+                                String src) throws IOException;
 
     /**
      * Create a new file.  Get back block and datanode info,
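
The versionID bump from 4 to 5 is what makes the incompatible signature
change safe to deploy: a mismatched client and namenode fail the RPC
version handshake up front instead of mis-parsing open()'s arguments.
A hedged sketch of such a check, built on the getProtocolVersion()
method declared by VersionedProtocol (the surrounding code is
illustrative):

    long serverVersion = namenode.getProtocolVersion(
        ClientProtocol.class.getName(), ClientProtocol.versionID);
    if (serverVersion != ClientProtocol.versionID) {
        // Refuse to talk across protocol versions.
        throw new IOException("Protocol version mismatch: client="
            + ClientProtocol.versionID + ", server=" + serverVersion);
    }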

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java Tue Jan 16 11:27:10 2007
@@ -517,7 +517,7 @@
         synchronized void openInfo() throws IOException {
             Block oldBlocks[] = this.blocks;
 
-            LocatedBlock results[] = namenode.open(src);            
+            LocatedBlock results[] = namenode.open(localName, src);            
             Vector blockV = new Vector();
             Vector nodeV = new Vector();
             for (int i = 0; i < results.length; i++) {
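
The localName argument is presumably the client's own hostname; the
field's initialization is not part of this diff.  A minimal sketch of
how such a field is commonly populated in plain Java (not code from
this commit):

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    String localName;
    try {
        // Resolve this machine's hostname once, at client startup.
        localName = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
        localName = "";  // empty fallback, for the sketch only
    }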

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSNamesystem.java Tue Jan 16 11:27:10 2007
@@ -291,7 +291,7 @@
      * The client should choose one of the machines from the machineArray
      * at random.
      */
-    public Object[] open(UTF8 src) {
+    public Object[] open(String clientMachine, UTF8 src) {
         Object results[] = null;
         Block blocks[] = dir.getFile(src);
         if (blocks != null) {
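
Only the signature changes in this hunk, and the Javadoc just above it
still tells the client to pick a machine "at random", which is exactly
the behavior that topology-aware ordering is meant to replace.  Pieced
together from the hunks in this mail, the parameter now flows from the
RPC entry point into the namespace logic:

    // NameNode.java (RPC entry point):
    public LocatedBlock[] open(String clientMachine, String src)
            throws IOException {
        Object openResults[] = namesystem.open(clientMachine, new UTF8(src));
        // ...
    }

    // FSNamesystem.java (namespace logic):
    public Object[] open(String clientMachine, UTF8 src) { /* ... */ }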

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NameNode.java Tue Jan 16 11:27:10 2007
@@ -204,8 +204,8 @@
     
     /**
      */
-    public LocatedBlock[] open(String src) throws IOException {
-        Object openResults[] = namesystem.open(new UTF8(src));
+    public LocatedBlock[] open(String clientMachine, String src) throws IOException {
+        Object openResults[] = namesystem.open(clientMachine, new UTF8(src));
         if (openResults == null) {
             throw new IOException("Cannot open filename " + src);
         } else {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java Tue Jan 16 11:27:10 2007
@@ -36,6 +36,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSOutputStream;
 import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.net.DNS;
 
 
 /**
@@ -151,7 +152,8 @@
     }
     res.totalFiles++;
     res.totalSize += file.getLen();
-    LocatedBlock[] blocks = nn.open(file.getPath());
+    LocatedBlock[] blocks = nn.open(DNS.getDefaultHost("default"),
+                                    file.getPath());
     res.totalBlocks += blocks.length;
     if (showFiles) {
       out.print(file.getPath() + " " + file.getLen() + ", " + blocks.length + " block(s): ");
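
NamenodeFsck runs inside the namenode process, so there is no remote
client to name; it simply passes the local machine's hostname.  A short
sketch of the helper it now imports (with "default" as the interface
name, DNS.getDefaultHost() does a plain local-host lookup; it can throw
UnknownHostException, a subclass of IOException):

    import org.apache.hadoop.net.DNS;

    // Roughly InetAddress.getLocalHost().getHostName() when the
    // interface argument is "default".
    String clientMachine = DNS.getDefaultHost("default");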

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestRestartDFS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestRestartDFS.java?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestRestartDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestRestartDFS.java Tue Jan 16 11:27:10 2007
@@ -38,6 +38,7 @@
   private static String TEST_ROOT_DIR =
     new Path(System.getProperty("test.build.data","/tmp"))
     .toString().replace(' ', '+');
+  private static Configuration conf = new Configuration();
 
   /** class MyFile contains enough information to recreate the contents of
    * a single file.
@@ -102,7 +103,6 @@
       files[idx] = new MyFile();
     }
     
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getNamed(fsname, conf);
     Path root = new Path(topdir);
     
@@ -128,7 +128,7 @@
   private static boolean checkFiles(String fsname, String topdir, MyFile[] files) 
   throws IOException {
     
-    Configuration conf = new Configuration();
+    //Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getNamed(fsname, conf);
     Path root = new Path(topdir);
     
@@ -156,7 +156,6 @@
   /** delete directory and everything underneath it.*/
   private static void deldir(String fsname, String topdir)
   throws IOException {
-    Configuration conf = new Configuration();
     FileSystem fs = FileSystem.getNamed(fsname, conf);
     Path root = new Path(topdir);
     fs.delete(root);
@@ -168,7 +167,6 @@
     MiniDFSCluster cluster = null;
     MyFile[] files = null;
     try {
-      Configuration conf = new Configuration();
       cluster = new MiniDFSCluster(65314, conf, 4, false);
       namenode = conf.get("fs.default.name", "local");
       if (!"local".equals(namenode)) {
@@ -179,7 +177,6 @@
     }
     assertTrue("Error creating files", files != null);
     try {
-      Configuration conf = new Configuration();
       // Here we restart the MiniDFScluster without formatting namenode
       cluster = new MiniDFSCluster(65320, conf, 4, false, false);
       namenode = conf.get("fs.default.name", "local");
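
The test changes are a supporting refactor rather than new coverage: the
Configuration is hoisted into one shared static field instead of being
re-created inside every helper, so both cluster runs and all the helper
methods operate on the same configuration object.  Condensed from the
hunks above (setup and error handling elided):

    private static Configuration conf = new Configuration();

    // First run: create the cluster and write the test files.
    MiniDFSCluster cluster = new MiniDFSCluster(65314, conf, 4, false);
    String namenode = conf.get("fs.default.name", "local");

    // Second run: restart on the same data without reformatting the
    // namenode, reusing the same shared conf.
    cluster = new MiniDFSCluster(65320, conf, 4, false, false);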

Modified: lucene/hadoop/trunk/src/webapps/datanode/browseBlock.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/datanode/browseBlock.jsp?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/webapps/datanode/browseBlock.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/datanode/browseBlock.jsp Tue Jan 16 11:27:10 2007
@@ -8,6 +8,7 @@
   import="org.apache.hadoop.dfs.*"
   import="org.apache.hadoop.io.*"
   import="org.apache.hadoop.conf.*"
+  import="org.apache.hadoop.net.DNS"
   import="java.text.DateFormat"
 %>
 
@@ -67,7 +68,8 @@
     blockSize = Long.parseLong(blockSizeStr);
 
     DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
-    LocatedBlock[] blocks = dfs.namenode.open(filename);
+    LocatedBlock[] blocks = dfs.namenode.open(
+        DNS.getDefaultHost("default"), filename);
     //Add the various links for looking at the file contents
     //URL for downloading the full file
     String downloadUrl = "http://" + req.getServerName() + ":" +
@@ -229,7 +231,8 @@
     //determine data for the next link
     if (startOffset + chunkSizeToView >= blockSize) {
       //we have to go to the next block from this point onwards
-      LocatedBlock[] blocks = dfs.namenode.open(filename);
+      LocatedBlock[] blocks = dfs.namenode.open(
+           DNS.getDefaultHost("default"), filename);
       for (int i = 0; i < blocks.length; i++) {
         if (blocks[i].getBlock().getBlockId() == blockId) {
           if (i != blocks.length - 1) {
@@ -276,7 +279,8 @@
     int prevPort = req.getServerPort();
     int prevDatanodePort = datanodePort;
     if (startOffset == 0) {
-      LocatedBlock [] blocks = dfs.namenode.open(filename);
+      LocatedBlock [] blocks = dfs.namenode.open(
+              DNS.getDefaultHost("default"), filename);
       for (int i = 0; i < blocks.length; i++) {
         if (blocks[i].getBlock().getBlockId() == blockId) {
           if (i != 0) {
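
The remaining webapp diffs below (browseDirectory.jsp, tail.jsp, and the
dfs-side browseDirectory.jsp) apply the same mechanical substitution as
this page: each JSP imports org.apache.hadoop.net.DNS and supplies its
own host as the "client", presumably because the browsing user's real
location is not known to the server.  The recurring pattern:

    // Before:
    LocatedBlock[] blocks = dfs.namenode.open(filename);

    // After:
    LocatedBlock[] blocks = dfs.namenode.open(
        DNS.getDefaultHost("default"), filename);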

Modified: lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp Tue Jan 16 11:27:10 2007
@@ -8,6 +8,7 @@
   import="org.apache.hadoop.dfs.*"
   import="org.apache.hadoop.io.*"
   import="org.apache.hadoop.conf.*"
+  import="org.apache.hadoop.net.DNS"
   import="java.text.DateFormat"
 %>
 <%!
@@ -54,7 +55,8 @@
       //Get the location of the first block of the file
       if (files[i].getPath().endsWith(".crc")) continue;
       if (!files[i].isDir()) {
-        LocatedBlock[] blocks = dfs.namenode.open(files[i].getPath());
+        LocatedBlock[] blocks = dfs.namenode.open(
+            DNS.getDefaultHost("default"), files[i].getPath());
         DatanodeInfo [] locations = blocks[0].getLocations();
         if (locations.length == 0) {
           cols[0] = files[i].getPath();

Modified: lucene/hadoop/trunk/src/webapps/datanode/tail.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/datanode/tail.jsp?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/webapps/datanode/tail.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/datanode/tail.jsp Tue Jan 16 11:27:10 2007
@@ -8,6 +8,7 @@
   import="org.apache.hadoop.dfs.*"
   import="org.apache.hadoop.io.*"
   import="org.apache.hadoop.conf.*"
+  import="org.apache.hadoop.net.DNS"
   import="java.text.DateFormat"
 %>
 
@@ -54,7 +55,8 @@
     //fetch the block from the datanode that has the last block for this file
     DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, 
                                          jspHelper.conf);
-    LocatedBlock blocks[] = dfs.namenode.open(filename);
+    LocatedBlock blocks[] = dfs.namenode.open(
+        DNS.getDefaultHost("default"), filename); 
     if (blocks == null || blocks.length == 0) {
       out.print("No datanodes contain blocks of file "+filename);
       dfs.close();

Modified: lucene/hadoop/trunk/src/webapps/dfs/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/dfs/browseDirectory.jsp?view=diff&rev=496827&r1=496826&r2=496827
==============================================================================
--- lucene/hadoop/trunk/src/webapps/dfs/browseDirectory.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/dfs/browseDirectory.jsp Tue Jan 16 11:27:10 2007
@@ -9,6 +9,7 @@
   import="org.apache.hadoop.dfs.*"
   import="org.apache.hadoop.io.*"
   import="org.apache.hadoop.conf.*"
+  import="org.apache.hadoop.net.DNS"
   import="java.text.DateFormat"
 %>
 <%!
@@ -32,7 +33,8 @@
     DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
     UTF8 target = new UTF8(dir);
     if( !dfs.isDirectory(target) ) { // a file
-      LocatedBlock[] blocks = dfs.namenode.open(dir);
+      LocatedBlock[] blocks = dfs.namenode.open(
+          DNS.getDefaultHost("default"), dir);
       DatanodeInfo [] locations = blocks[0].getLocations();
       if (locations.length == 0) {
         out.print("Empty file");
@@ -86,7 +88,9 @@
       //Get the location of the first block of the file
       if (files[i].getPath().endsWith(".crc")) continue;
       if (!files[i].isDir()) {
-        LocatedBlock[] blocks = dfs.namenode.open(files[i].getPath());
+        LocatedBlock[] blocks = dfs.namenode.open(
+            DNS.getDefaultHost("default"), files[i].getPath());
+
         DatanodeInfo [] locations = blocks[0].getLocations();
         if (locations.length == 0) {
           cols[0] = files[i].getName();