Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/08/23 16:24:24 UTC

svn commit: r569011 - in /lucene/hadoop/trunk: ./ src/java/org/apache/hadoop/dfs/ src/test/org/apache/hadoop/dfs/ src/webapps/datanode/

Author: cutting
Date: Thu Aug 23 07:24:19 2007
New Revision: 569011

URL: http://svn.apache.org/viewvc?rev=569011&view=rev
Log:
HADOOP-1744.  Remove many uses of the deprecated UTF8 class from the HDFS namenode.  Contributed by Christophe Taton.

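For illustration only (not part of this commit): a minimal caller-side sketch of the reworked DFSClient API, which now takes plain String paths instead of UTF8 objects. The class name, namenode address, port, and paths below are placeholders.

    package org.apache.hadoop.dfs;

    import java.io.IOException;
    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSInputStream;

    // Hypothetical example class; address, port, and paths are placeholders.
    public class Utf8ToStringExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        InetSocketAddress nameNodeAddr = new InetSocketAddress("localhost", 9000);
        DFSClient dfs = new DFSClient(nameNodeAddr, conf);

        // Formerly dfs.mkdirs(new UTF8("/data")); paths now pass straight
        // through to the namenode RPCs as Strings.
        dfs.mkdirs("/data");

        if (dfs.exists("/data/xx")) {
          FSInputStream in = dfs.open("/data/xx");   // was open(new UTF8(...))
          in.close();
          dfs.rename("/data/xx", "/data/yy");        // was rename(UTF8, UTF8)
          dfs.delete("/data/yy");                    // was delete(UTF8)
        }
      }
    }
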
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
    lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Thu Aug 23 07:24:19 2007
@@ -85,6 +85,9 @@
     installed in a particular location, as long as it is on $PATH.
     (cutting)
 
+    HADOOP-1744.  Remove many uses of the deprecated UTF8 class from
+    the HDFS namenode.  (Christophe Taton via cutting)
+
 
 Release 0.14.0 - 2007-08-17
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSClient.java Thu Aug 23 07:24:19 2007
@@ -263,7 +263,7 @@
     return hints;
   }
 
-  public DFSInputStream open(UTF8 src) throws IOException {
+  public DFSInputStream open(String src) throws IOException {
     return open(src, conf.getInt("io.file.buffer.size", 4096));
   }
   /**
@@ -272,10 +272,10 @@
    * inner subclass of InputStream that does the right out-of-band
    * work.
    */
-  public DFSInputStream open(UTF8 src, int buffersize) throws IOException {
+  public DFSInputStream open(String src, int buffersize) throws IOException {
     checkOpen();
     //    Get block info from namenode
-    return new DFSInputStream(src.toString(), buffersize);
+    return new DFSInputStream(src, buffersize);
   }
 
   /**
@@ -391,32 +391,32 @@
    * Make a direct connection to namenode and manipulate structures
    * there.
    */
-  public boolean rename(UTF8 src, UTF8 dst) throws IOException {
+  public boolean rename(String src, String dst) throws IOException {
     checkOpen();
-    return namenode.rename(src.toString(), dst.toString());
+    return namenode.rename(src, dst);
   }
 
   /**
    * Make a direct connection to namenode and manipulate structures
    * there.
    */
-  public boolean delete(UTF8 src) throws IOException {
+  public boolean delete(String src) throws IOException {
     checkOpen();
-    return namenode.delete(src.toString());
+    return namenode.delete(src);
   }
 
   /**
    */
-  public boolean exists(UTF8 src) throws IOException {
+  public boolean exists(String src) throws IOException {
     checkOpen();
-    return namenode.exists(src.toString());
+    return namenode.exists(src);
   }
 
   /**
    */
-  public boolean isDirectory(UTF8 src) throws IOException {
+  public boolean isDirectory(String src) throws IOException {
     checkOpen();
-    return namenode.isDir(src.toString());
+    return namenode.isDir(src);
   }
 
   /**
@@ -503,9 +503,9 @@
 
   /**
    */
-  public boolean mkdirs(UTF8 src) throws IOException {
+  public boolean mkdirs(String src) throws IOException {
     checkOpen();
-    return namenode.mkdirs(src.toString());
+    return namenode.mkdirs(src);
   }
 
   /**

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DistributedFileSystem.java Thu Aug 23 07:24:19 2007
@@ -94,13 +94,6 @@
     workingDir = makeAbsolute(dir);
   }
 
-  /**
-   * @deprecated use {@link #getPathName(Path)} instead.
-   */
-  private UTF8 getPath(Path file) {
-    return new UTF8(getPathName(file));
-  }
-
   private String getPathName(Path file) {
     checkPath(file);
     String result = makeAbsolute(file).toUri().getPath();
@@ -117,7 +110,7 @@
   }
 
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
-    return new DFSClient.DFSDataInputStream(dfs.open(getPath(f),bufferSize));
+    return new DFSClient.DFSDataInputStream(dfs.open(getPathName(f),bufferSize));
   }
 
   public FSDataOutputStream create(Path f, boolean overwrite,
@@ -146,18 +139,18 @@
    * Rename files/dirs
    */
   public boolean rename(Path src, Path dst) throws IOException {
-    return dfs.rename(getPath(src), getPath(dst));
+    return dfs.rename(getPathName(src), getPathName(dst));
   }
 
   /**
    * Get rid of Path f, whether a true file or dir.
    */
   public boolean delete(Path f) throws IOException {
-    return dfs.delete(getPath(f));
+    return dfs.delete(getPathName(f));
   }
 
   public boolean exists(Path f) throws IOException {
-    return dfs.exists(getPath(f));
+    return dfs.exists(getPathName(f));
   }
 
   public long getContentLength(Path f) throws IOException {
@@ -187,7 +180,7 @@
   }
 
   public boolean mkdirs(Path f) throws IOException {
-    return dfs.mkdirs(getPath(f));
+    return dfs.mkdirs(getPathName(f));
   }
 
   public void close() throws IOException {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/NamenodeFsck.java Thu Aug 23 07:24:19 2007
@@ -71,7 +71,7 @@
   public static final int FIXING_DELETE = 2;
   
   private NameNode nn;
-  private UTF8 lostFound = null;
+  private String lostFound = null;
   private boolean lfInited = false;
   private boolean lfInitedOk = false;
   private boolean showFiles = false;
@@ -248,7 +248,7 @@
     if (!lfInitedOk) {
       return;
     }
-    String target = lostFound.toString() + file.getPath();
+    String target = lostFound + file.getPath();
     String errmsg = "Failed to move " + file.getPath() + " to /lost+found";
     try {
       if (!nn.mkdirs(target)) {
@@ -294,7 +294,7 @@
       }
       if (fos != null) fos.close();
       LOG.warn("\n - moved corrupted file " + file.getPath() + " to /lost+found");
-      dfs.delete(new UTF8(file.getPath().toString()));
+      dfs.delete(file.getPath().toString());
     }  catch (Exception e) {
       e.printStackTrace();
       LOG.warn(errmsg + ": " + e.getMessage());
@@ -419,7 +419,7 @@
   private void lostFoundInit(DFSClient dfs) {
     lfInited = true;
     try {
-      UTF8 lfName = new UTF8("/lost+found");
+      String lfName = "/lost+found";
       // check that /lost+found exists
       if (!dfs.exists(lfName)) {
         lfInitedOk = dfs.mkdirs(lfName);

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/StreamFile.java Thu Aug 23 07:24:19 2007
@@ -17,17 +17,13 @@
  */
 package org.apache.hadoop.dfs;
 
-
 import javax.servlet.*;
 import javax.servlet.http.*;
 import java.io.*;
 import java.util.*;
 import java.net.*;
-import org.apache.hadoop.dfs.*;
 import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
 import org.apache.hadoop.conf.*;
-import java.text.DateFormat;
 
 public class StreamFile extends HttpServlet {
 
@@ -50,7 +46,7 @@
       return;
     }
     DFSClient dfs = new DFSClient(nameNodeAddr, conf);
-    FSInputStream in = dfs.open(new UTF8(filename));
+    FSInputStream in = dfs.open(filename);
     OutputStream os = response.getOutputStream();
     response.setHeader("Content-Disposition", "attachment; filename=\"" + 
                        filename + "\"");

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java Thu Aug 23 07:24:19 2007
@@ -307,7 +307,7 @@
         ListIterator li = testfilesList.listIterator();
         while (li.hasNext()) {
           testFileName = (UTF8) li.next();
-          FSInputStream nis = dfsClient.open(testFileName);
+          FSInputStream nis = dfsClient.open(testFileName.toString());
           byte[] bufferGolden = new byte[bufferSize];
           int m = 42;
           try {
@@ -337,7 +337,7 @@
         li = testfilesList.listIterator();
         while (li.hasNext()) {
           testFileName = (UTF8) li.next();
-          assertTrue(dfsClient.delete(testFileName));
+          assertTrue(dfsClient.delete(testFileName.toString()));
         }
 
         //

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java Thu Aug 23 07:24:19 2007
@@ -135,14 +135,14 @@
     
       // create a directory
       try {
-        assertTrue(dfsClient.mkdirs(new UTF8("/data")));
+        assertTrue(dfsClient.mkdirs("/data"));
         assertMkdirs("/data", false);
       } catch (IOException ioe) {
       	ioe.printStackTrace();
       }
        
       try {
-        assertTrue(dfsClient.mkdirs(new UTF8("data")));
+        assertTrue(dfsClient.mkdirs("data"));
         assertMkdirs("data", true);
       } catch (IOException ioe) {
        	ioe.printStackTrace();
@@ -175,7 +175,7 @@
     
       // delete the file
       try {
-        dfsClient.delete(new UTF8("/data/yy"));
+        dfsClient.delete("/data/yy");
         assertDelete("/data/yy", false);
       } catch(IOException ioe) {
         ioe.printStackTrace();
@@ -184,21 +184,21 @@
     
       // rename the file
       try {
-        dfsClient.rename(new UTF8("/data/xx"), new UTF8("/data/yy"));
+        dfsClient.rename("/data/xx", "/data/yy");
         assertRename("/data/xx", "/data/yy", false);
       } catch(IOException ioe) {
       	ioe.printStackTrace();
       }
 
       try {
-        dfsClient.delete(new UTF8("/data/xx"));
+        dfsClient.delete("/data/xx");
         assertDelete("/data/xx", true);
       } catch(IOException ioe) {
     	ioe.printStackTrace();
       }
       
       try {
-        dfsClient.rename(new UTF8("/data/xx"), new UTF8("/data/yy"));    
+        dfsClient.rename("/data/xx", "/data/yy");    
         assertRename("/data/xx", "/data/yy", true);
       } catch(IOException ioe) {
     	ioe.printStackTrace();

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSUpgradeFromImage.java Thu Aug 23 07:24:19 2007
@@ -149,7 +149,7 @@
       } else {
         // this is not a directory. Checksum the file data.
         CRC32 fileCRC = new CRC32();
-        FSInputStream in = client.open(new UTF8(path));
+        FSInputStream in = client.open(path);
         byte[] buf = new byte[4096];
         int nRead = 0;
         while ( (nRead = in.read(buf, 0, buf.length)) > 0 ) {

Modified: lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp?rev=569011&r1=569010&r2=569011&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp (original)
+++ lucene/hadoop/trunk/src/webapps/datanode/browseDirectory.jsp Thu Aug 23 07:24:19 2007
@@ -31,10 +31,10 @@
       namenodeInfoPort = Integer.parseInt(namenodeInfoPortStr);
     
     DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf);
-    UTF8 target = new UTF8(dir);
+    String target = dir;
     if (!dfs.exists(target)) {
       out.print("<h3>File or directory : " + target + " does not exists</h3>");
-      JspHelper.printGotoForm(out, namenodeInfoPort, target.toString());
+      JspHelper.printGotoForm(out, namenodeInfoPort, target);
     }
     else {
       if( !dfs.isDirectory(target) ) { // a file
@@ -67,7 +67,7 @@
         return;
       }
       // directory
-      DFSFileInfo[] files = dfs.listPaths(target.toString());
+      DFSFileInfo[] files = dfs.listPaths(target);
       //generate a table and dump the info
       String [] headings = new String[6];
       headings[0] = "Name"; headings[1] = "Type"; headings[2] = "Size";