Posted to hdfs-commits@hadoop.apache.org by su...@apache.org on 2009/10/26 22:55:43 UTC

svn commit: r829990 - in /hadoop/hdfs/trunk: CHANGES.txt src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java

Author: suresh
Date: Mon Oct 26 21:55:43 2009
New Revision: 829990

URL: http://svn.apache.org/viewvc?rev=829990&view=rev
Log:
HDFS-625. Fix NullPointerException thrown from ListPathServlet. Contributed by Suresh Srinivas.


Added:
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java
Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=829990&r1=829989&r2=829990&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Mon Oct 26 21:55:43 2009
@@ -456,6 +456,8 @@
     HDFS-725. Support the build error fix for HADOOP-6327.  (Sanjay Radia via
     szetszwo)
 
+    HDFS-625. Fix NullPointerException thrown from ListPathServlet. (suresh)
+
 Release 0.20.2 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=829990&r1=829989&r2=829990&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/hdfs/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Mon Oct 26 21:55:43 2009
@@ -152,7 +152,12 @@
       while (!pathstack.empty()) {
         String p = pathstack.pop();
         try {
-          for (FileStatus i : nnproxy.getListing(p)) {
+          FileStatus[] listing = nnproxy.getListing(p);
+          if (listing == null) {
+            LOG.warn("ListPathsServlet - Path " + p + " does not exist");
+            continue;
+          }
+          for (FileStatus i : listing) {
             if (exclude.matcher(i.getPath().getName()).matches()
                 || !filter.matcher(i.getPath().getName()).matches()) {
               continue;
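
The guard added above matters because nnproxy.getListing(p) can return null when the requested path does not exist (hence the warning now logged); the pre-patch code iterated that result directly, so the enhanced for statement dereferenced null and threw the NullPointerException reported in HDFS-625. A minimal, self-contained sketch of the failure mode and the guard follows; the class and the stand-in getListing below are illustrative only, not taken from the servlet:

    public class NullListingDemo {
      // Stand-in for the namenode proxy's getListing, which yields null for a missing path.
      static String[] getListing(String path) {
        return "/exists".equals(path) ? new String[] { "a", "b" } : null;
      }

      public static void main(String[] args) {
        String[] listing = getListing("/nonexistent");
        if (listing == null) {            // the null check added by this patch
          System.err.println("Path does not exist, skipping");
          return;
        }
        for (String name : listing) {     // without the check, this line throws NullPointerException
          System.out.println(name);
        }
      }
    }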

Added: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java?rev=829990&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java (added)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestListPathServlet.java Mon Oct 26 21:55:43 2009
@@ -0,0 +1,136 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test for {@link ListPathsServlet} that serves the URL
+ * http://<namenodeaddress>:<httpport>/listPaths
+ * 
+ * This test does not use the servlet directly. Instead it is based on
+ * {@link HftpFileSystem}, which uses this servlet to implement
+ * {@link HftpFileSystem#listStatus(Path)} method.
+ */
+public class TestListPathServlet {
+  private static final Configuration CONF = new HdfsConfiguration();
+  private static MiniDFSCluster cluster;
+  private static FileSystem fs;
+  private static URI hftpURI;
+  private static HftpFileSystem hftpFs;
+  private Random r = new Random();
+  private List<String> filelist = new ArrayList<String>();
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    // start a cluster with single datanode
+    cluster = new MiniDFSCluster(CONF, 1, true, null);
+    cluster.waitActive();
+    fs = cluster.getFileSystem();
+
+    final String str = "hftp://"
+        + CONF.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+    hftpURI = new URI(str);
+    hftpFs = (HftpFileSystem) FileSystem.newInstance(hftpURI, CONF);
+  }
+
+  @AfterClass
+  public static void teardown() {
+    cluster.shutdown();
+  }
+
+  /** create a file with a length of <code>fileLen</code> */
+  private void createFile(String fileName, long fileLen) throws IOException {
+    filelist.add(hftpURI + fileName);
+    final Path filePath = new Path(fileName);
+    DFSTestUtil.createFile(fs, filePath, fileLen, (short) 1, r.nextLong());
+  }
+
+  private void mkdirs(String dirName) throws IOException {
+    filelist.add(hftpURI + dirName);
+    fs.mkdirs(new Path(dirName));
+  }
+
+  @Test
+  public void testListStatus() throws Exception {
+    // Empty root directory
+    checkStatus("/");
+
+    // Root directory with files and directories
+    createFile("/a", 1);
+    createFile("/b", 1);
+    mkdirs("/dir");
+    checkStatus("/");
+
+    // A directory with files and directories
+    createFile("/dir/a", 1);
+    createFile("/dir/b", 1);
+    mkdirs("/dir/dir1");
+    checkStatus("/dir");
+
+    // Non existent path
+    checkStatus("/nonexistent");
+    checkStatus("/nonexistent/a");
+  }
+
+  private void checkStatus(String listdir) throws IOException {
+    final Path listpath = hftpFs.makeQualified(new Path(listdir));
+    listdir = listpath.toString();
+    final FileStatus[] statuslist = hftpFs.listStatus(listpath);
+    for (String directory : filelist) {
+      System.out.println("dir:" + directory);
+    }
+    for (String file : filelist) {
+      System.out.println("file:" + file);
+    }
+    for (FileStatus status : statuslist) {
+      System.out.println("status:" + status.getPath().toString() + " type "
+          + (status.isDir() ? "directory" : "file"));
+    }
+    for (String file : filelist) {
+      boolean found = false;
+      // Consider only file under the list path
+      if (!file.startsWith(listpath.toString()) ||
+          file.equals(listpath.toString())) {
+        continue;
+      }
+      for (FileStatus status : statuslist) {
+        if (status.getPath().toString().equals(file)) {
+          found = true;
+          break;
+        }
+      }
+      Assert.assertTrue("Directory/file not returned in list status " + file,
+          found);
+    }
+  }
+}
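
The new TestListPathServlet exercises the servlet indirectly through HftpFileSystem#listStatus, as its javadoc notes, including the nonexistent-path case that previously triggered the NullPointerException. For inspecting the servlet's raw output directly, a standalone probe along the following lines could be used; the /listPaths path is taken from the javadoc above, while the class name and command-line arguments are purely illustrative:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.URL;

    public class ListPathsProbe {
      public static void main(String[] args) throws Exception {
        // args[0]: NameNode HTTP address, e.g. the value of
        // DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY; args[1]: path to list, e.g. "/dir"
        URL url = new URL("http://" + args[0] + "/listPaths" + args[1]);
        BufferedReader in = new BufferedReader(new InputStreamReader(url.openStream()));
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line); // raw listing emitted by ListPathsServlet
        }
        in.close();
      }
    }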