Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:43:20 UTC

svn commit: r1077678 - in /hadoop/common/branches/branch-0.20-security-patches/src: hdfs/org/apache/hadoop/hdfs/ hdfs/org/apache/hadoop/hdfs/server/namenode/ test/org/apache/hadoop/hdfs/

Author: omalley
Date: Fri Mar  4 04:43:19 2011
New Revision: 1077678

URL: http://svn.apache.org/viewvc?rev=1077678&view=rev
Log:
commit 3ab63a360581b66b4d039da3439b907ff2a212fe
Author: tsz <ts...@yahoo-inc.com>
Date:   Thu Sep 16 13:46:38 2010 -0700

    HDFS-1383 from https://issues.apache.org/jira/secure/attachment/12454697/h1383_20100915b_y20.patch
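
In short, the patch improves HFTP error reporting end to end: HftpFileSystem.open() now surfaces the HTTP response code and message when a request fails, the namenode servlets send plainer 400 messages, ListPathsServlet reports IOExceptions inside the XML response and always closes the document, and MiniDFSCluster grows a test helper for opening an HftpFileSystem as another user. A minimal sketch of what a client sees after this change (the host and path below are hypothetical):

    Configuration conf = new Configuration();
    FileSystem hftp = new Path("hftp://namenode:50070").getFileSystem(conf);
    try {
      hftp.open(new Path("/no/such/file"));
    } catch (IOException ioe) {
      // The message now carries the servlet's response, e.g.
      // "File not found /no/such/file (error code=400)"
      System.err.println(ioe.getMessage());
    }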

Modified:
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
    hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestListPathServlet.java

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java Fri Mar  4 04:43:19 2011
@@ -262,17 +262,26 @@ public class HftpFileSystem extends File
 
   @Override
   public FSDataInputStream open(Path f, int buffersize) throws IOException {
-    HttpURLConnection connection = null;
-    connection = openConnection("/data" + f.toUri().getPath(),
-        "ugi=" + getUgiParameter());
-    connection.setRequestMethod("GET");
-    connection.connect();
+    final HttpURLConnection connection = openConnection(
+        "/data" + f.toUri().getPath(), "ugi=" + getUgiParameter());
+    final InputStream in;
+    try {
+      connection.setRequestMethod("GET");
+      connection.connect();
+      in = connection.getInputStream();
+    } catch(IOException ioe) {
+      final int code = connection.getResponseCode();
+      final String s = connection.getResponseMessage();
+      throw s == null? ioe:
+          new IOException(s + " (error code=" + code + ")", ioe);
+    }
+
     final String cl = connection.getHeaderField(StreamFile.CONTENT_LENGTH);
     final long filelength = cl == null? -1: Long.parseLong(cl);
     if (LOG.isDebugEnabled()) {
       LOG.debug("filelength = " + filelength);
     }
-    final InputStream in = connection.getInputStream();
+
     return new FSDataInputStream(new FSInputStream() {
         long currentPos = 0;
 

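Isolated from the diff, the error-surfacing idiom added to open() reads as follows (a sketch, not code from the patch; the method name is invented). If the server sent an HTTP response message, it is rethrown with the status code attached; otherwise the original exception propagates:

    static InputStream openWithDiagnostics(HttpURLConnection connection)
        throws IOException {
      try {
        connection.setRequestMethod("GET");
        connection.connect();
        return connection.getInputStream();
      } catch (IOException ioe) {
        // getResponseCode()/getResponseMessage() read the server's reply;
        // they may return -1/null if no response was ever received.
        final int code = connection.getResponseCode();
        final String s = connection.getResponseMessage();
        throw s == null ? ioe
            : new IOException(s + " (error code=" + code + ")", ioe);
      }
    }
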
Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java Fri Mar  4 04:43:19 2011
@@ -121,9 +121,9 @@ public class FileDataServlet extends Dfs
                   response.getWriter().println(e.toString());
                 }
               } else if (info == null){
-                response.sendError(400, "cat: File not found " + path);
+                response.sendError(400, "File not found " + path);
               } else {
-                response.sendError(400, "cat: " + path + ": is a directory");
+                response.sendError(400, path + " is a directory");
               }
               return null;
             }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/hdfs/org/apache/hadoop/hdfs/server/namenode/ListPathsServlet.java Fri Mar  4 04:43:19 2011
@@ -23,7 +23,6 @@ import org.apache.hadoop.hdfs.HftpFileSy
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
-import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.znerd.xmlenc.*;
@@ -129,9 +128,11 @@ public class ListPathsServlet extends Df
     throws ServletException, IOException {
     final PrintWriter out = response.getWriter();
     final XMLOutputter doc = new XMLOutputter(out, "UTF-8");
+
+    final Map<String, String> root = buildRoot(request, doc);
+    final String path = root.get("path");
+
     try {
-      final Map<String, String> root = buildRoot(request, doc);
-      final String path = root.get("path");
       final boolean recur = "yes".equals(root.get("recursive"));
       final Pattern filter = Pattern.compile(root.get("filter"));
       final Pattern exclude = Pattern.compile(root.get("exclude"));
@@ -188,17 +189,18 @@ public class ListPathsServlet extends Df
               writeXml(re, p, doc);
             }
           }
-          if (doc != null) {
-            doc.endDocument();
-          }
           return null;
         }
       });
-      
+    } catch(IOException ioe) {
+      writeXml(ioe, path, doc);
     } catch (InterruptedException e) {
       LOG.warn("ListPathServlet encountered InterruptedException", e);
       response.sendError(400, e.getMessage());
     } finally {
+      if (doc != null) {
+        doc.endDocument();
+      }
       if (out != null) {
         out.close();
       }

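The restructuring above serves two goals: buildRoot() and path move out of the try block so the new IOException handler can name the failing path, and doc.endDocument() moves into finally so the servlet always emits a well-formed XML document, error or not. The resulting shape, condensed for reference:

    final PrintWriter out = response.getWriter();
    final XMLOutputter doc = new XMLOutputter(out, "UTF-8");
    final Map<String, String> root = buildRoot(request, doc);
    final String path = root.get("path");
    try {
      // ... walk the namespace and write the <listing> entries ...
    } catch (IOException ioe) {
      writeXml(ioe, path, doc);  // the error becomes part of the XML response
    } finally {
      if (doc != null) {
        doc.endDocument();       // close the document unconditionally
      }
      if (out != null) {
        out.close();
      }
    }
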
Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/MiniDFSCluster.java Fri Mar  4 04:43:19 2011
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.nio.channels.FileChannel;
@@ -787,6 +788,22 @@ public class MiniDFSCluster {
   }
 
   /**
+   *  @return a {@link HftpFileSystem} object as specified user. 
+   */
+  public HftpFileSystem getHftpFileSystemAs(final String username,
+      final Configuration conf, final String... groups
+      ) throws IOException, InterruptedException {
+    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+        username, groups);
+    return ugi.doAs(new PrivilegedExceptionAction<HftpFileSystem>() {
+      @Override
+      public HftpFileSystem run() throws Exception {
+        return getHftpFileSystem();
+      }
+    });
+  }
+
+  /**
    * Get the directories where the namenode stores its image.
    */
   public Collection<File> getNameDirs() {

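The helper simply wraps UserGroupInformation.createUserForTesting() in a doAs() so that getHftpFileSystem() runs as the given user. The tests below use it to provoke permission errors over HFTP; the recurring pattern (excerpted from the test diffs) is:

    final String username =
        UserGroupInformation.getCurrentUser().getShortUserName() + "1";
    final HftpFileSystem hftp2 =
        cluster.getHftpFileSystemAs(username, conf, "somegroup");
    try {
      hftp2.getFileChecksum(qualified);  // the directory was set to mode 000
      fail();
    } catch (IOException ioe) {
      FileSystem.LOG.info("GOOD: getting an exception", ioe);
    }
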
Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestDistributedFileSystem.java Fri Mar  4 04:43:19 2011
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.hdfs;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import java.io.IOException;
 import java.net.URI;
 import java.util.Random;
@@ -34,7 +39,6 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Level;
 import org.junit.Test;
-import static org.junit.Assert.*;
 
 public class TestDistributedFileSystem {
   private static final Random RAN = new Random();
@@ -138,90 +142,6 @@ public class TestDistributedFileSystem {
   }
   
   @Test
-  public void testFileChecksum() throws IOException {
-    ((Log4JLogger)HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
-
-    final long seed = RAN.nextLong();
-    System.out.println("seed=" + seed);
-    RAN.setSeed(seed);
-
-    final Configuration conf = getTestConfiguration();
-    conf.set("slave.host.name", "localhost");
-
-    final MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
-    final FileSystem hdfs = cluster.getFileSystem();
-    final String hftpuri = "hftp://" + conf.get("dfs.http.address");
-    System.out.println("hftpuri=" + hftpuri);
-    final FileSystem hftp = new Path(hftpuri).getFileSystem(conf);
-
-    final String dir = "/filechecksum";
-    final int block_size = 1024;
-    final int buffer_size = conf.getInt("io.file.buffer.size", 4096);
-    conf.setInt("io.bytes.per.checksum", 512);
-
-    //try different number of blocks
-    for(int n = 0; n < 5; n++) {
-      //generate random data
-      final byte[] data = new byte[RAN.nextInt(block_size/2-1)+n*block_size+1];
-      RAN.nextBytes(data);
-      System.out.println("data.length=" + data.length);
-  
-      //write data to a file
-      final Path foo = new Path(dir, "foo" + n);
-      {
-        final FSDataOutputStream out = hdfs.create(foo, false, buffer_size,
-            (short)2, block_size);
-        out.write(data);
-        out.close();
-      }
-      
-      //compute checksum
-      final FileChecksum hdfsfoocs = hdfs.getFileChecksum(foo);
-      System.out.println("hdfsfoocs=" + hdfsfoocs);
-      
-      final FileChecksum hftpfoocs = hftp.getFileChecksum(foo);
-      System.out.println("hftpfoocs=" + hftpfoocs);
-
-      final Path qualified = new Path(hftpuri + dir, "foo" + n);
-      final FileChecksum qfoocs = hftp.getFileChecksum(qualified);
-      System.out.println("qfoocs=" + qfoocs);
-
-      //write another file
-      final Path bar = new Path(dir, "bar" + n);
-      {
-        final FSDataOutputStream out = hdfs.create(bar, false, buffer_size,
-            (short)2, block_size);
-        out.write(data);
-        out.close();
-      }
-  
-      { //verify checksum
-        final FileChecksum barcs = hdfs.getFileChecksum(bar);
-        final int barhashcode = barcs.hashCode();
-        assertEquals(hdfsfoocs.hashCode(), barhashcode);
-        assertEquals(hdfsfoocs, barcs);
-
-        assertEquals(hftpfoocs.hashCode(), barhashcode);
-        assertEquals(hftpfoocs, barcs);
-
-        assertEquals(qfoocs.hashCode(), barhashcode);
-        assertEquals(qfoocs, barcs);
-      }
-    }
-    cluster.shutdown();
-  }
-  
-  @Test
-  public void testAllWithDualPort() throws Exception {
-    dualPortTesting = true;
-
-    testFileSystemCloseAll();
-    testDFSClose();
-    testDFSClient();
-    testFileChecksum();
-  }
-  
-  @Test
   public void testStatistics() throws Exception {
     int lsLimit = 2;
     final Configuration conf = getTestConfiguration();
@@ -312,4 +232,100 @@ public class TestDistributedFileSystem {
     assertEquals(writeOps, DFSTestUtil.getStatistics(fs).getWriteOps());
     assertEquals(largeReadOps, DFSTestUtil.getStatistics(fs).getLargeReadOps());
   }
+
+  @Test
+  public void testFileChecksum() throws Exception {
+    ((Log4JLogger)HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
+
+    final long seed = RAN.nextLong();
+    System.out.println("seed=" + seed);
+    RAN.setSeed(seed);
+
+    final Configuration conf = getTestConfiguration();
+    conf.set("slave.host.name", "localhost");
+
+    final MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null);
+    final FileSystem hdfs = cluster.getFileSystem();
+    final String hftpuri = "hftp://" + conf.get("dfs.http.address");
+    System.out.println("hftpuri=" + hftpuri);
+    final FileSystem hftp = new Path(hftpuri).getFileSystem(conf);
+
+    final String dir = "/filechecksum";
+    final int block_size = 1024;
+    final int buffer_size = conf.getInt("io.file.buffer.size", 4096);
+    conf.setInt("io.bytes.per.checksum", 512);
+
+    //try different number of blocks
+    for(int n = 0; n < 5; n++) {
+      //generate random data
+      final byte[] data = new byte[RAN.nextInt(block_size/2-1)+n*block_size+1];
+      RAN.nextBytes(data);
+      System.out.println("data.length=" + data.length);
+  
+      //write data to a file
+      final Path foo = new Path(dir, "foo" + n);
+      {
+        final FSDataOutputStream out = hdfs.create(foo, false, buffer_size,
+            (short)2, block_size);
+        out.write(data);
+        out.close();
+      }
+      
+      //compute checksum
+      final FileChecksum hdfsfoocs = hdfs.getFileChecksum(foo);
+      System.out.println("hdfsfoocs=" + hdfsfoocs);
+      
+      final FileChecksum hftpfoocs = hftp.getFileChecksum(foo);
+      System.out.println("hftpfoocs=" + hftpfoocs);
+
+      final Path qualified = new Path(hftpuri + dir, "foo" + n);
+      final FileChecksum qfoocs = hftp.getFileChecksum(qualified);
+      System.out.println("qfoocs=" + qfoocs);
+
+      //write another file
+      final Path bar = new Path(dir, "bar" + n);
+      {
+        final FSDataOutputStream out = hdfs.create(bar, false, buffer_size,
+            (short)2, block_size);
+        out.write(data);
+        out.close();
+      }
+  
+      { //verify checksum
+        final FileChecksum barcs = hdfs.getFileChecksum(bar);
+        final int barhashcode = barcs.hashCode();
+        assertEquals(hdfsfoocs.hashCode(), barhashcode);
+        assertEquals(hdfsfoocs, barcs);
+
+        assertEquals(hftpfoocs.hashCode(), barhashcode);
+        assertEquals(hftpfoocs, barcs);
+
+        assertEquals(qfoocs.hashCode(), barhashcode);
+        assertEquals(qfoocs, barcs);
+      }
+
+      { //test permission error on hftp 
+        hdfs.setPermission(new Path(dir), new FsPermission((short)0));
+        try {
+          final String username = UserGroupInformation.getCurrentUser().getShortUserName() + "1";
+          final HftpFileSystem hftp2 = cluster.getHftpFileSystemAs(username, conf, "somegroup");
+          hftp2.getFileChecksum(qualified);
+          fail();
+        } catch(IOException ioe) {
+          FileSystem.LOG.info("GOOD: getting an exception", ioe);
+        }
+      }
+    }
+    cluster.shutdown();
+  }
+  
+  @Test
+  public void testAllWithDualPort() throws Exception {
+    dualPortTesting = true;
+
+    testFileSystemCloseAll();
+    testDFSClose();
+    testDFSClient();
+    testFileChecksum();
+  }
 }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestFileStatus.java Fri Mar  4 04:43:19 2011
@@ -17,7 +17,9 @@
  */
 package org.apache.hadoop.hdfs;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -25,20 +27,23 @@ import java.util.Random;
 
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Level;
+import org.junit.Test;
 
 /**
  * This class tests the FileStatus API.
  */
-public class TestFileStatus extends TestCase {
+public class TestFileStatus {
   {
     ((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)FileSystem.LOG).getLogger().setLevel(Level.ALL);
@@ -71,7 +76,8 @@ public class TestFileStatus extends Test
   /**
    * Tests various options of DFSShell.
    */
-  public void testFileStatus() throws IOException {
+  @Test
+  public void testFileStatus() throws Exception {
     Configuration conf = new Configuration();
     conf.setInt(DFSConfigKeys.DFS_LIST_LIMIT, 2);
     MiniDFSCluster cluster = new MiniDFSCluster(conf, 1, true, null);
@@ -132,7 +138,7 @@ public class TestFileStatus extends Test
 
       // test listStatus on a non-existent file/directory
       stats = fs.listStatus(dir);
-      assertEquals(null, stats);
+      assertTrue(null == stats);
       try {
         status = fs.getFileStatus(dir);
         fail("getFileStatus of non-existent path should fail");
@@ -226,6 +232,18 @@ public class TestFileStatus extends Test
       assertEquals(dir5.toString(), stats[2].getPath().toString());
       assertEquals(file2.toString(), stats[3].getPath().toString());
       assertEquals(file3.toString(), stats[4].getPath().toString());
+
+      { //test permission error on hftp 
+        fs.setPermission(dir, new FsPermission((short)0));
+        try {
+          final String username = UserGroupInformation.getCurrentUser().getShortUserName() + "1";
+          final HftpFileSystem hftp2 = cluster.getHftpFileSystemAs(username, conf, "somegroup");
+          hftp2.getContentSummary(dir);
+          fail();
+        } catch(IOException ioe) {
+          FileSystem.LOG.info("GOOD: getting an exception", ioe);
+        }
+      }
     } finally {
       fs.close();
       cluster.shutdown();

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestListPathServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestListPathServlet.java?rev=1077678&r1=1077677&r2=1077678&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestListPathServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/hdfs/TestListPathServlet.java Fri Mar  4 04:43:19 2011
@@ -27,7 +27,9 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.server.namenode.ListPathsServlet;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -100,6 +102,29 @@ public class TestListPathServlet {
     // Non existent path
     checkStatus("/nonexistent");
     checkStatus("/nonexistent/a");
+
+    final String username = UserGroupInformation.getCurrentUser().getShortUserName() + "1";
+    final HftpFileSystem hftp2 = cluster.getHftpFileSystemAs(username, CONF, "somegroup");
+    { //test file not found on hftp 
+      final Path nonexistent = new Path("/nonexistent");
+      try {
+        hftp2.getFileStatus(nonexistent);
+        Assert.fail();
+      } catch(IOException ioe) {
+        FileSystem.LOG.info("GOOD: getting an exception", ioe);
+      }
+    }
+
+    { //test permission error on hftp
+      final Path dir = new Path("/dir");
+      fs.setPermission(dir, new FsPermission((short)0));
+      try {
+        hftp2.getFileStatus(new Path(dir, "a"));
+        Assert.fail();
+      } catch(IOException ioe) {
+        FileSystem.LOG.info("GOOD: getting an exception", ioe);
+      }
+    }
   }
 
   private void checkStatus(String listdir) throws IOException {