Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:43:44 UTC

svn commit: r1077680 - in /hadoop/common/branches/branch-0.20-security-patches: ./ src/core/org/apache/hadoop/io/ src/core/org/apache/hadoop/io/nativeio/ src/mapred/org/apache/hadoop/mapred/ src/native/ src/native/src/ src/native/src/org/apache/hadoop/...

Author: omalley
Date: Fri Mar  4 04:43:43 2011
New Revision: 1077680

URL: http://svn.apache.org/viewvc?rev=1077680&view=rev
Log:
commit 9eb13a357be8202418bbf2bb8b80f669c738f99c
Author: Devaraj Das <dd...@yahoo-inc.com>
Date:   Fri Sep 17 00:24:41 2010 -0700

    : Fixes task log servlet vulnerabilities via symlinks
    
    +++ b/YAHOO-CHANGES.txt
    +    : Fixes task log servlet vulnerabilities via symlinks.
    +    (Todd Lipcon and Devaraj Das)
    +
    +    , : Write task initialization to avoid race conditions
    +    leading to privilege escalation and resource leakage by performing more
    +    actions as the user. Owen O'Malley, Devaraj Das, Chris Douglas
    +

Added:
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/Errno.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/TestSecureIOUtils.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java
Modified:
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IndexCache.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/MapTask.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/SpillRecord.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskController.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLog.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogServlet.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
    hadoop/common/branches/branch-0.20-security-patches/src/native/Makefile.am
    hadoop/common/branches/branch-0.20-security-patches/src/native/config.h.in
    hadoop/common/branches/branch-0.20-security-patches/src/native/configure.ac
    hadoop/common/branches/branch-0.20-security-patches/src/native/src/org_apache_hadoop.h
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIndexCache.java

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 04:43:43 2011
@@ -548,6 +548,7 @@
   	
     <mkdir dir="${build.native}/lib"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+    <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
 
   	<javah
@@ -560,6 +561,14 @@
       <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
   	</javah>
 
+        <javah
+          classpath="${build.classes}"
+          destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
+          force="yes"
+          verbose="yes"
+          >
+          <class name="org.apache.hadoop.io.nativeio.NativeIO" />
+        </javah>
   	<javah
   	  classpath="${build.classes}"
   	  destdir="${build.native}/src/org/apache/hadoop/security"

Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/SecureIOUtils.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/SecureIOUtils.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/SecureIOUtils.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,206 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.nativeio.Errno;
+import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.io.nativeio.NativeIOException;
+import org.apache.hadoop.io.nativeio.NativeIO.Stat;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * This class provides secure APIs for opening and creating files on the local
+ * disk. The main issue this class tries to handle is that of symlink traversal.
+ * <br/>
+ * An example of such an attack is:
+ * <ol>
+ * <li> Malicious user removes his task's syslog file, and puts a link to the
+ * jobToken file of a target user.</li>
+ * <li> Malicious user tries to open the syslog file via the servlet on the
+ * tasktracker.</li>
+ * <li> The tasktracker is unaware of the symlink, and simply streams the contents
+ * of the jobToken file. The malicious user can now access potentially sensitive
+ * map outputs, etc. of the target user's job.</li>
+ * </ol>
+ * A similar attack is possible involving the
+ * {@link org.apache.hadoop.mapred.TaskLogsTruncater}, but there it involves
+ * an insecure write to a file.
+ * <br/>
+ */
+public class SecureIOUtils {
+
+  /**
+   * Ensure that we are set up to run with the appropriate native support code.
+   * If security is disabled, and the support code is unavailable, this class
+   * still tries its best to be secure, but is vulnerable to some race condition
+   * attacks.
+   *
+   * If security is enabled but the support code is unavailable, throws a
+   * RuntimeException since we don't want to run insecurely.
+   */
+  static {
+    boolean shouldBeSecure = UserGroupInformation.isSecurityEnabled();
+    boolean canBeSecure = NativeIO.isAvailable();
+
+    if (!canBeSecure && shouldBeSecure) {
+      throw new RuntimeException(
+        "Secure IO is not possible without native code extensions.");
+    }
+
+    // Pre-cache an instance of the raw FileSystem since we sometimes
+    // do secure IO in a shutdown hook, where this call could fail.
+    try {
+      rawFilesystem = FileSystem.getLocal(new Configuration()).getRaw();
+    } catch (IOException ie) {
+      throw new RuntimeException(
+      "Couldn't obtain an instance of RawLocalFileSystem.");
+    }
+
+    // SecureIO just skips security checks in the case that security is
+    // disabled
+    skipSecurity = !canBeSecure;
+  }
+
+  private final static boolean skipSecurity;
+  private final static FileSystem rawFilesystem;
+
+  /**
+   * Open the given File for read access, verifying the expected user/group
+   * constraints.
+   * @param f the file that we are trying to open
+   * @param expectedOwner the expected user owner for the file
+   * @param expectedGroup the expected group owner for the file
+   * @throws IOException if an IO Error occurred, or the user/group does not 
+   * match
+   */
+  public static FileInputStream openForRead(File f, String expectedOwner, 
+      String expectedGroup) throws IOException {
+    if (skipSecurity) {
+      // Subject to race conditions but this is the best we can do
+      FileStatus status =
+        rawFilesystem.getFileStatus(new Path(f.getAbsolutePath()));
+      checkStat(f, status.getOwner(), status.getGroup(),
+          expectedOwner, expectedGroup);
+      return new FileInputStream(f);
+    }
+
+    FileInputStream fis = new FileInputStream(f);
+    boolean success = false;
+    try {
+      Stat stat = NativeIO.fstat(fis.getFD());
+      checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
+          expectedGroup);
+      success = true;
+      return fis;
+    } finally {
+      if (!success) fis.close();
+    }
+  }
+
+  private static FileOutputStream insecureCreateForWrite(File f,
+      int permissions) throws IOException {
+    // If we can't do real security, do a racy exists check followed by an
+    // open and chmod
+    if (f.exists()) {
+      throw new AlreadyExistsException("File " + f + " already exists");
+    }
+    FileOutputStream fos = new FileOutputStream(f);
+    boolean success = false;
+    try {
+      rawFilesystem.setPermission(new Path(f.getAbsolutePath()),
+        new FsPermission((short)permissions));
+      success = true;
+      return fos;
+    } finally {
+      if (!success) {
+        fos.close();
+      }
+    }
+  }
+
+  /**
+   * Open the specified File for write access, ensuring that it does not exist.
+   * @param f the file that we want to create
+   * @param permissions we want to have on the file (if security is enabled)
+   *
+   * @throws AlreadyExistsException if the file already exists
+   * @throws IOException if any other error occurred
+   */
+  public static FileOutputStream createForWrite(File f, int permissions)
+  throws IOException {
+    if (skipSecurity) {
+      return insecureCreateForWrite(f, permissions);
+    } else {
+      // Use the native wrapper around open(2)
+      try {
+        FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
+          NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
+          permissions);
+        return new FileOutputStream(fd);
+      } catch (NativeIOException nioe) {
+        if (nioe.getErrno() == Errno.EEXIST) {
+          throw new AlreadyExistsException(nioe);
+        }
+        throw nioe;
+      }
+    }
+  }
+
+  private static void checkStat(File f, String owner, String group, 
+      String expectedOwner, 
+      String expectedGroup) throws IOException {
+    if (expectedOwner != null &&
+        !expectedOwner.equals(owner)) {
+      throw new IOException(
+        "Owner '" + owner + "' for path " + f + " did not match " +
+        "expected owner '" + expectedOwner + "'");
+    }
+    if (expectedGroup != null &&
+        !expectedGroup.equals(group)) {
+      throw new IOException(
+        "Group '" + group + "' for path " + f + " did not match " +
+        "expected group '" + expectedGroup + "'");
+    }
+  }
+
+  /**
+   * Signals that an attempt to create a file at a given pathname has failed
+   * because another file already existed at that path.
+   */
+  public static class AlreadyExistsException extends IOException {
+    private static final long serialVersionUID = -6615764817774423232L;
+    public AlreadyExistsException(String msg) {
+      super(msg);
+    }
+
+    public AlreadyExistsException(Throwable cause) {
+      super(cause);
+    }
+  }
+}

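For reference, a minimal sketch of how a caller might use SecureIOUtils
(not part of this commit; the path and expected owner below are
illustrative):

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureIOUtilsExample {
      public static void main(String[] args) throws IOException {
        // Read: the stream is opened first and the *open descriptor* is
        // then fstat'd, so a symlink swapped in after the check gains an
        // attacker nothing (no time-of-check/time-of-use window).
        File log = new File("/tmp/task-syslog");            // illustrative
        BufferedReader in = new BufferedReader(new InputStreamReader(
            SecureIOUtils.openForRead(log, "taskuser", null)));
        try {
          System.out.println(in.readLine());
        } finally {
          in.close();
        }

        // Write: the native O_CREAT|O_EXCL open refuses to follow a
        // pre-planted symlink and fails if the path already exists.
        try {
          FileOutputStream out = SecureIOUtils.createForWrite(
              new File("/tmp/task-syslog.tmp"), 0644);
          out.close();
        } catch (SecureIOUtils.AlreadyExistsException aee) {
          System.err.println("already exists: " + aee.getMessage());
        }
      }
    }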
Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/Errno.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/Errno.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/Errno.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/Errno.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+/**
+ * Enum representing POSIX errno values.
+ */
+public enum Errno {
+  EPERM,
+  ENOENT,
+  ESRCH,
+  EINTR,
+  EIO,
+  ENXIO,
+  E2BIG,
+  ENOEXEC,
+  EBADF,
+  ECHILD,
+  EAGAIN,
+  ENOMEM,
+  EACCES,
+  EFAULT,
+  ENOTBLK,
+  EBUSY,
+  EEXIST,
+  EXDEV,
+  ENODEV,
+  ENOTDIR,
+  EISDIR,
+  EINVAL,
+  ENFILE,
+  EMFILE,
+  ENOTTY,
+  ETXTBSY,
+  EFBIG,
+  ENOSPC,
+  ESPIPE,
+  EROFS,
+  EMLINK,
+  EPIPE,
+  EDOM,
+  ERANGE,
+
+  UNKNOWN;
+}

Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIO.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIO.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+import org.apache.hadoop.util.NativeCodeLoader;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+/**
+ * JNI wrappers for various native IO-related calls not available in Java.
+ * These functions should generally be used alongside a fallback to another
+ * more portable mechanism.
+ */
+public class NativeIO {
+  // Flags for open() call from bits/fcntl.h
+  public static final int O_RDONLY   =    00;
+  public static final int O_WRONLY   =    01;
+  public static final int O_RDWR     =    02;
+  public static final int O_CREAT    =  0100;
+  public static final int O_EXCL     =  0200;
+  public static final int O_NOCTTY   =  0400;
+  public static final int O_TRUNC    = 01000;
+  public static final int O_APPEND   = 02000;
+  public static final int O_NONBLOCK = 04000;
+  public static final int O_SYNC   =  010000;
+  public static final int O_ASYNC  =  020000;
+  public static final int O_FSYNC = O_SYNC;
+  public static final int O_NDELAY = O_NONBLOCK;
+
+  private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+  private static boolean nativeLoaded = false;
+
+  static {
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      try {
+        initNative();
+        nativeLoaded = true;
+      } catch (Throwable t) {
+        // This can happen if the user has an older version of libhadoop.so
+        // installed - in this case we can continue without native IO
+        // after warning
+        LOG.error("Unable to initialize NativeIO libraries", t);
+      }
+    }
+  }
+
+  /**
+   * Return true if the JNI-based native IO extensions are available.
+   */
+  public static boolean isAvailable() {
+    return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
+  }
+
+  /** Wrapper around open(2) */
+  public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
+  /** Wrapper around fstat(2) */
+  public static native Stat fstat(FileDescriptor fd) throws IOException;
+  /** Initialize the JNI method ID and class ID cache */
+  private static native void initNative();
+
+
+  /**
+   * Result type of the fstat call
+   */
+  public static class Stat {
+    private String owner, group;
+    private int mode;
+
+    // Mode constants
+    public static final int S_IFMT = 0170000;      /* type of file */
+    public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
+    public static final int   S_IFCHR  = 0020000;  /* character special */
+    public static final int   S_IFDIR  = 0040000;  /* directory */
+    public static final int   S_IFBLK  = 0060000;  /* block special */
+    public static final int   S_IFREG  = 0100000;  /* regular */
+    public static final int   S_IFLNK  = 0120000;  /* symbolic link */
+    public static final int   S_IFSOCK = 0140000;  /* socket */
+    public static final int   S_IFWHT  = 0160000;  /* whiteout */
+    public static final int S_ISUID = 0004000;  /* set user id on execution */
+    public static final int S_ISGID = 0002000;  /* set group id on execution */
+    public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
+    public static final int S_IRUSR = 0000400;  /* read permission, owner */
+    public static final int S_IWUSR = 0000200;  /* write permission, owner */
+    public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
+
+    Stat(String owner, String group, int mode) {
+      this.owner = owner;
+      this.group = group;
+      this.mode = mode;
+    }
+
+    public String toString() {
+      return "Stat(owner='" + owner + "', group='" + group + "'" +
+        ", mode=" + mode + ")";
+    }
+
+    public String getOwner() {
+      return owner;
+    }
+    public String getGroup() {
+      return group;
+    }
+    public int getMode() {
+      return mode;
+    }
+  }
+}

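The octal constants in Stat follow the standard POSIX st_mode layout; a
short sketch of decoding a mode value with them (the mode below is made
up, as if returned by NativeIO.fstat):

    import org.apache.hadoop.io.nativeio.NativeIO;

    public class ModeBitsExample {
      public static void main(String[] args) {
        int mode = 0100644;  // hypothetical st_mode: regular file, rw-r--r--

        // The file type lives in the bits selected by S_IFMT ...
        boolean isRegular = (mode & NativeIO.Stat.S_IFMT) == NativeIO.Stat.S_IFREG;
        boolean isSymlink = (mode & NativeIO.Stat.S_IFMT) == NativeIO.Stat.S_IFLNK;

        // ... and the permission bits in the low octal digits.
        boolean ownerCanWrite = (mode & NativeIO.Stat.S_IWUSR) != 0;
        boolean setuid        = (mode & NativeIO.Stat.S_ISUID) != 0;

        System.out.println("regular=" + isRegular + " symlink=" + isSymlink
            + " ownerWrite=" + ownerCanWrite + " setuid=" + setuid);
      }
    }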
Added: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.IOException;
+
+/**
+ * An exception generated by a call to the native IO code.
+ *
+ * These exceptions simply wrap <i>errno</i> result codes.
+ */
+public class NativeIOException extends IOException {
+  private static final long serialVersionUID = -6615764817732323232L;
+  private Errno errno;
+
+  public NativeIOException(String msg, Errno errno) {
+    super(msg);
+    this.errno = errno;
+  }
+
+  public Errno getErrno() {
+    return errno;
+  }
+
+  public String toString() {
+    return errno.toString() + ": " + super.getMessage();
+  }
+}
+
+

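Together with the Errno enum above, this lets callers branch on the
native error without parsing message strings. A minimal sketch (the
path is illustrative, not part of this commit):

    import java.io.FileDescriptor;
    import java.io.IOException;

    import org.apache.hadoop.io.nativeio.Errno;
    import org.apache.hadoop.io.nativeio.NativeIO;
    import org.apache.hadoop.io.nativeio.NativeIOException;

    public class NativeOpenExample {
      public static void main(String[] args) throws IOException {
        try {
          FileDescriptor fd = NativeIO.open("/tmp/example-file",
              NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0600);
          // ... write through new FileOutputStream(fd) ...
        } catch (NativeIOException nioe) {
          if (nioe.getErrno() == Errno.EEXIST) {
            System.err.println("lost the race: file already exists");
          } else if (nioe.getErrno() == Errno.EACCES) {
            System.err.println("permission denied");
          } else {
            throw nioe;
          }
        }
      }
    }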
Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IndexCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IndexCache.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IndexCache.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/IndexCache.java Fri Mar  4 04:43:43 2011
@@ -53,16 +53,17 @@ class IndexCache {
    * @param reduce
    * @param fileName The file to read the index information from if it is not
    *                 already present in the cache
+   * @param expectedIndexOwner The expected owner of the index file
    * @return The Index Information
    * @throws IOException
    */
   public IndexRecord getIndexInformation(String mapId, int reduce,
-      Path fileName) throws IOException {
+      Path fileName, String expectedIndexOwner) throws IOException {
 
     IndexInformation info = cache.get(mapId);
 
     if (info == null) {
-      info = readIndexFileToCache(fileName, mapId);
+      info = readIndexFileToCache(fileName, mapId, expectedIndexOwner);
     } else {
       synchronized (info) {
         while (null == info.mapSpillRecord) {
@@ -86,7 +87,7 @@ class IndexCache {
   }
 
   private IndexInformation readIndexFileToCache(Path indexFileName,
-      String mapId) throws IOException {
+      String mapId, String expectedIndexOwner) throws IOException {
     IndexInformation info;
     IndexInformation newInd = new IndexInformation();
     if ((info = cache.putIfAbsent(mapId, newInd)) != null) {
@@ -105,7 +106,7 @@ class IndexCache {
     LOG.debug("IndexCache MISS: MapId " + mapId + " not found") ;
     SpillRecord tmp = null;
     try { 
-      tmp = new SpillRecord(indexFileName, conf);
+      tmp = new SpillRecord(indexFileName, conf, expectedIndexOwner);
     } catch (Throwable e) { 
       tmp = new SpillRecord(0);
       cache.remove(mapId);

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/LinuxTaskController.java Fri Mar  4 04:43:43 2011
@@ -290,5 +290,10 @@ class LinuxTaskController extends TaskCo
       }
     }
   }
+
+  @Override
+  public String getRunAsUser(JobConf conf) {
+    return conf.getUser();
+  }
 }
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/MapTask.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/MapTask.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/MapTask.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/MapTask.java Fri Mar  4 04:43:43 2011
@@ -65,6 +65,7 @@ import org.apache.hadoop.mapreduce.split
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitIndex;
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.IndexedSortable;
 import org.apache.hadoop.util.IndexedSorter;
 import org.apache.hadoop.util.Progress;
@@ -1506,7 +1507,8 @@ class MapTask extends Task {
       // read in paged indices
       for (int i = indexCacheList.size(); i < numSpills; ++i) {
         Path indexFileName = mapOutputFile.getSpillIndexFile(i);
-        indexCacheList.add(new SpillRecord(indexFileName, job));
+        indexCacheList.add(new SpillRecord(indexFileName, job, 
+            UserGroupInformation.getCurrentUser().getShortUserName()));
       }
 
       //make correction in the length to include the sequence file header

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/SpillRecord.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/SpillRecord.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/SpillRecord.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/SpillRecord.java Fri Mar  4 04:43:43 2011
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.mapred;
 
+import java.io.DataInputStream;
+import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.LongBuffer;
@@ -26,11 +28,11 @@ import java.util.zip.CheckedOutputStream
 import java.util.zip.Checksum;
 
 import org.apache.hadoop.fs.ChecksumException;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SecureIOUtils;
 
 import static org.apache.hadoop.mapred.MapTask.MAP_OUTPUT_INDEX_RECORD_LENGTH;
 
@@ -47,15 +49,18 @@ class SpillRecord {
     entries = buf.asLongBuffer();
   }
 
-  public SpillRecord(Path indexFileName, JobConf job) throws IOException {
-    this(indexFileName, job, new CRC32());
+  public SpillRecord(Path indexFileName, JobConf job, String expectedIndexOwner)
+  throws IOException {
+    this(indexFileName, job, new CRC32(), expectedIndexOwner);
   }
 
-  public SpillRecord(Path indexFileName, JobConf job, Checksum crc)
-      throws IOException {
+  public SpillRecord(Path indexFileName, JobConf job, Checksum crc, 
+      String expectedIndexOwner) throws IOException {
 
     final FileSystem rfs = FileSystem.getLocal(job).getRaw();
-    final FSDataInputStream in = rfs.open(indexFileName);
+    final DataInputStream in =
+      new DataInputStream(SecureIOUtils.openForRead(
+         new File(indexFileName.toUri().getPath()), expectedIndexOwner, null));
     try {
       final long length = rfs.getFileStatus(indexFileName).getLen();
       final int partitions = (int) length / MAP_OUTPUT_INDEX_RECORD_LENGTH;

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskController.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskController.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskController.java Fri Mar  4 04:43:43 2011
@@ -169,6 +169,13 @@ public abstract class TaskController imp
       }
     }
   }
+  
+   /**
+    * Returns the local unix user that a given job will run as.
+    */
+   public String getRunAsUser(JobConf conf) {
+     return System.getProperty("user.name");
+   }
 
   //Write the JVM command line to a file under the specified directory
   // Note that the JVM will be launched using a setuid executable, and

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLog.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLog.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLog.java Fri Mar  4 04:43:43 2011
@@ -26,6 +26,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Enumeration;
@@ -39,8 +40,10 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.util.ProcessTree;
 import org.apache.hadoop.util.Shell;
@@ -106,7 +109,8 @@ public class TaskLog {
         new HashMap<LogName, LogFileDetail>();
 
     File indexFile = getIndexFile(taskid, isCleanup);
-    BufferedReader fis = new BufferedReader(new java.io.FileReader(indexFile));
+    BufferedReader fis = new BufferedReader(new InputStreamReader(
+      SecureIOUtils.openForRead(indexFile, obtainLogDirOwner(taskid), null)));
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
     //stdout:<start-offset in the stdout file> <length>
@@ -153,6 +157,18 @@ public class TaskLog {
     return new File(getAttemptDir(taskid, isCleanup), "log.index");
   }
 
+  /**
+   * Obtain the owner of the log dir. This is 
+   * determined by checking the job's log directory.
+   */
+  static String obtainLogDirOwner(TaskAttemptID taskid) throws IOException {
+    Configuration conf = new Configuration();
+    FileSystem raw = FileSystem.getLocal(conf).getRaw();
+    Path jobLogDir = new Path(getJobDir(taskid.getJobID()).getAbsolutePath());
+    FileStatus jobStat = raw.getFileStatus(jobLogDir);
+    return jobStat.getOwner();
+  }
+
   static String getBaseLogDir() {
     return System.getProperty("hadoop.log.dir");
   }
@@ -194,7 +210,8 @@ public class TaskLog {
     File tmpIndexFile = getTmpIndexFile(currentTaskid, isCleanup);
     
     BufferedOutputStream bos = 
-      new BufferedOutputStream(new FileOutputStream(tmpIndexFile,false));
+      new BufferedOutputStream(
+        SecureIOUtils.createForWrite(tmpIndexFile, 0644));
     DataOutputStream dos = new DataOutputStream(bos);
     //the format of the index file is
     //LOG_DIR: <the dir where the task logs are really stored>
@@ -330,7 +347,9 @@ public class TaskLog {
       start += fileDetail.start;
       end += fileDetail.start;
       bytesRemaining = end - start;
-      file = new FileInputStream(new File(fileDetail.location, kind.toString()));
+      String owner = obtainLogDirOwner(taskid);
+      file = SecureIOUtils.openForRead(new File(fileDetail.location, kind.toString()), 
+          owner, null);
       // skip upto start
       long pos = 0;
       while (pos < start) {

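The index-file format read above is spelled out in the comments
("LOG_DIR: <dir>", then per-stream "<name>:<start-offset> <length>"
lines); a small parsing sketch over made-up contents (this helper is
hypothetical, not part of TaskLog):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class LogIndexParseExample {
      public static void main(String[] args) throws IOException {
        BufferedReader in = new BufferedReader(new StringReader(
            "LOG_DIR: /tmp/userlogs/attempt_x\n" +   // illustrative
            "stdout:0 1024\n"));
        String logDir = in.readLine().substring("LOG_DIR: ".length());
        String[] parts = in.readLine().split("[: ]");
        long start  = Long.parseLong(parts[1]);
        long length = Long.parseLong(parts[2]);
        System.out.println(logDir + " stdout@" + start + "+" + length);
        in.close();
      }
    }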
Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogServlet.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogServlet.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogServlet.java Fri Mar  4 04:43:43 2011
@@ -28,6 +28,8 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.QueueManager.QueueACL;
 import org.apache.hadoop.mapreduce.JobACL;
@@ -43,6 +45,9 @@ import org.apache.hadoop.util.StringUtil
 public class TaskLogServlet extends HttpServlet {
   private static final long serialVersionUID = -6615764817774487321L;
   
+  private static final Log LOG =
+    LogFactory.getLog(TaskLog.class);
+  
   private boolean haveTaskLog(TaskAttemptID taskId, boolean isCleanup,
       TaskLog.LogName type) {
     File f = TaskLog.getTaskLogFile(taskId, isCleanup, type);
@@ -101,11 +106,10 @@ public class TaskLogServlet extends Http
         // do nothing
       }
       else {
-        response.sendError(HttpServletResponse.SC_GONE,
-                         "Failed to retrieve " + filter + " log for task: " + 
-                         taskId);
-        out.write(("TaskLogServlet exception:\n" + 
-                 StringUtils.stringifyException(ioe) + "\n").getBytes());
+        String msg = "Failed to retrieve " + filter + " log for task: " + 
+                     taskId;
+        LOG.warn(msg, ioe);
+        response.sendError(HttpServletResponse.SC_GONE, msg);
       }
     }
   }

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskLogsTruncater.java Fri Mar  4 04:43:43 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
 
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.HashMap;
@@ -32,6 +31,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.mapred.TaskLog;
 import org.apache.hadoop.mapred.TaskLog.LogName;
 import org.apache.hadoop.mapred.TaskLog.LogFileDetail;
@@ -74,6 +74,15 @@ public class TaskLogsTruncater {
    * retainSize.
    */
   public void truncateLogs(JVMInfo lInfo) {
+    Task firstAttempt = lInfo.getAllAttempts().get(0);
+    String owner;
+    try {
+      owner = TaskLog.obtainLogDirOwner(firstAttempt.getTaskID());
+    } catch (IOException ioe) {
+      LOG.error("Unable to create a secure IO context to truncate logs for " +
+        firstAttempt, ioe);
+      return;
+    }
 
     // Read the log-file details for all the attempts that ran in this JVM
     Map<Task, Map<LogName, LogFileDetail>> taskLogFileDetails;
@@ -120,7 +129,7 @@ public class TaskLogsTruncater {
       // ////// Open truncate.tmp file for writing //////
       File tmpFile = new File(attemptLogDir, "truncate.tmp");
       try {
-        tmpFileOutputStream = new FileOutputStream(tmpFile);
+        tmpFileOutputStream = SecureIOUtils.createForWrite(tmpFile, 0644);
       } catch (IOException ioe) {
         LOG.warn("Cannot open " + tmpFile.getAbsolutePath()
             + " for writing truncated log-file "
@@ -132,11 +141,11 @@ public class TaskLogsTruncater {
 
       // ////// Open logFile for reading //////
       try {
-        logFileInputStream = new FileInputStream(logFile);
-      } catch (FileNotFoundException fe) {
+        logFileInputStream = SecureIOUtils.openForRead(logFile, owner, null);
+      } catch (IOException ioe) {
         if (LOG.isDebugEnabled()) {
           LOG.debug("Cannot open " + logFile.getAbsolutePath()
-              + " for reading. Continuing with other log files");
+              + " for reading. Continuing with other log files", ioe);
         }
         try {
           tmpFileOutputStream.close();
@@ -275,8 +284,8 @@ public class TaskLogsTruncater {
 
   /**
    * Get the logFileDetails of all the list of attempts passed.
+   * @param allAttempts the attempts we are interested in
    * 
-   * @param lInfo
    * @return a map of task to the log-file detail
    * @throws IOException
    */
@@ -287,8 +296,7 @@ public class TaskLogsTruncater {
     for (Task task : allAttempts) {
       Map<LogName, LogFileDetail> allLogsFileDetails;
       allLogsFileDetails =
-          TaskLog.getAllLogsFileDetails(task.getTaskID(),
-              task.isTaskCleanupTask());
+          TaskLog.getAllLogsFileDetails(task.getTaskID(), task.isTaskCleanupTask());
       taskLogFileDetails.put(task, allLogsFileDetails);
     }
     return taskLogFileDetails;

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/TaskTracker.java Fri Mar  4 04:43:43 2011
@@ -18,6 +18,7 @@
 package org.apache.hadoop.mapred;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -70,6 +71,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.SecureIOUtils;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.Server;
@@ -2897,6 +2899,15 @@ public class TaskTracker implements MRCo
       return task.getTaskID().hashCode();
     }
   }
+  
+  private void authorizeJVM(JobID jobId) throws IOException {
+    String currentJobId = 
+      UserGroupInformation.getCurrentUser().getUserName();
+    if (!currentJobId.equals(jobId.toString())) {
+      throw new IOException ("JVM with " + currentJobId + 
+          " is not authorized for " + jobId);
+    }
+  }
 
     
   // ///////////////////////////////////////////////////////////////
@@ -2908,6 +2919,7 @@ public class TaskTracker implements MRCo
    */
   public synchronized JvmTask getTask(JvmContext context) 
   throws IOException {
+    authorizeJVM(context.jvmId.getJobId());
     JVMId jvmId = context.jvmId;
     LOG.debug("JVM with ID : " + jvmId + " asked for a task");
     // save pid of task JVM sent by child
@@ -2948,6 +2960,7 @@ public class TaskTracker implements MRCo
   public synchronized boolean statusUpdate(TaskAttemptID taskid, 
                                               TaskStatus taskStatus) 
   throws IOException {
+    authorizeJVM(taskid.getJobID());
     TaskInProgress tip = tasks.get(taskid);
     if (tip != null) {
       tip.reportProgress(taskStatus);
@@ -2963,6 +2976,7 @@ public class TaskTracker implements MRCo
    * diagnostic info
    */
   public synchronized void reportDiagnosticInfo(TaskAttemptID taskid, String info) throws IOException {
+    authorizeJVM(taskid.getJobID());
     TaskInProgress tip = tasks.get(taskid);
     if (tip != null) {
       tip.reportDiagnosticInfo(info);
@@ -2973,6 +2987,7 @@ public class TaskTracker implements MRCo
   
   public synchronized void reportNextRecordRange(TaskAttemptID taskid, 
       SortedRanges.Range range) throws IOException {
+    authorizeJVM(taskid.getJobID());
     TaskInProgress tip = tasks.get(taskid);
     if (tip != null) {
       tip.reportNextRecordRange(range);
@@ -2984,6 +2999,7 @@ public class TaskTracker implements MRCo
 
   /** Child checking to see if we're alive.  Normally does nothing.*/
   public synchronized boolean ping(TaskAttemptID taskid) throws IOException {
+    authorizeJVM(taskid.getJobID());
     return tasks.get(taskid) != null;
   }
 
@@ -2994,6 +3010,7 @@ public class TaskTracker implements MRCo
   public synchronized void commitPending(TaskAttemptID taskid,
                                          TaskStatus taskStatus) 
   throws IOException {
+    authorizeJVM(taskid.getJobID());
     LOG.info("Task " + taskid + " is in commit-pending," +"" +
              " task state:" +taskStatus.getRunState());
     statusUpdate(taskid, taskStatus);
@@ -3003,7 +3020,9 @@ public class TaskTracker implements MRCo
   /**
    * Child checking whether it can commit 
    */
-  public synchronized boolean canCommit(TaskAttemptID taskid) {
+  public synchronized boolean canCommit(TaskAttemptID taskid) 
+  throws IOException {
+    authorizeJVM(taskid.getJobID());
     return commitResponses.contains(taskid); //don't remove it now
   }
   
@@ -3012,6 +3031,7 @@ public class TaskTracker implements MRCo
    */
   public synchronized void done(TaskAttemptID taskid) 
   throws IOException {
+    authorizeJVM(taskid.getJobID());
     TaskInProgress tip = tasks.get(taskid);
     commitResponses.remove(taskid);
     if (tip != null) {
@@ -3027,6 +3047,7 @@ public class TaskTracker implements MRCo
    */  
   public synchronized void shuffleError(TaskAttemptID taskId, String message) 
   throws IOException { 
+    authorizeJVM(taskId.getJobID());
     LOG.fatal("Task: " + taskId + " - Killed due to Shuffle Failure: " + message);
     TaskInProgress tip = runningTasks.get(taskId);
     tip.reportDiagnosticInfo("Shuffle Error: " + message);
@@ -3038,6 +3059,7 @@ public class TaskTracker implements MRCo
    */  
   public synchronized void fsError(TaskAttemptID taskId, String message) 
   throws IOException {
+    authorizeJVM(taskId.getJobID());
     LOG.fatal("Task: " + taskId + " - Killed due to FSError: " + message);
     TaskInProgress tip = runningTasks.get(taskId);
     tip.reportDiagnosticInfo("FSError: " + message);
@@ -3049,6 +3071,7 @@ public class TaskTracker implements MRCo
    */  
   public synchronized void fatalError(TaskAttemptID taskId, String msg) 
   throws IOException {
+    authorizeJVM(taskId.getJobID());
     LOG.fatal("Task: " + taskId + " - Killed : " + msg);
     TaskInProgress tip = runningTasks.get(taskId);
     tip.reportDiagnosticInfo("Error: " + msg);
@@ -3058,6 +3081,7 @@ public class TaskTracker implements MRCo
   public synchronized MapTaskCompletionEventsUpdate getMapCompletionEvents(
       JobID jobId, int fromEventId, int maxLocs, TaskAttemptID id) 
   throws IOException {
+    authorizeJVM(jobId);
     TaskCompletionEvent[]mapEvents = TaskCompletionEvent.EMPTY_ARRAY;
     synchronized (shouldReset) {
       if (shouldReset.remove(id)) {
@@ -3316,7 +3340,7 @@ public class TaskTracker implements MRCo
       // true iff IOException was caused by attempt to access input
       boolean isInputException = true;
       OutputStream outStream = null;
-      FSDataInputStream mapOutputIn = null;
+      FileInputStream mapOutputIn = null;
  
       long totalRead = 0;
       ShuffleServerInstrumentation shuffleMetrics =
@@ -3339,12 +3363,14 @@ public class TaskTracker implements MRCo
             context.getAttribute("local.file.system")).getRaw();
 
       String userName = null;
+      String runAsUserName = null;
       synchronized (tracker.runningJobs) {
         RunningJob rjob = tracker.runningJobs.get(JobID.forName(jobId));
         if (rjob == null) {
           throw new IOException("Unknown job " + jobId + "!!");
         }
         userName = rjob.jobConf.getUser();
+        runAsUserName = tracker.getTaskController().getRunAsUser(rjob.jobConf);
       }
       // Index file
       Path indexFileName =
@@ -3363,7 +3389,8 @@ public class TaskTracker implements MRCo
          * the map-output for the given reducer is available. 
          */
         IndexRecord info = 
-          tracker.indexCache.getIndexInformation(mapId, reduce,indexFileName);
+          tracker.indexCache.getIndexInformation(mapId, reduce,indexFileName, 
+              runAsUserName);
           
         //set the custom "from-map-task" http header to the map task from which
         //the map output data is being transferred
@@ -3391,10 +3418,11 @@ public class TaskTracker implements MRCo
          * send it to the reducer.
          */
         //open the map-output file
-        mapOutputIn = rfs.open(mapOutputFileName);
+        mapOutputIn = SecureIOUtils.openForRead(
+            new File(mapOutputFileName.toUri().getPath()), runAsUserName, null);
 
         //seek to the correct offset for the reduce
-        mapOutputIn.seek(info.startOffset);
+        mapOutputIn.skip(info.startOffset);
         long rem = info.partLength;
         int len =
           mapOutputIn.read(buffer, 0, (int)Math.min(rem, MAX_BYTES_TO_READ));

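One subtlety in the last hunk: FSDataInputStream.seek positions the
stream exactly, while FileInputStream.skip may legally skip fewer bytes
than requested. The commit calls skip once; a defensive caller would
loop, e.g. (a sketch, not part of this commit; the file and offset are
illustrative):

    import java.io.FileInputStream;
    import java.io.IOException;

    public class SkipFully {
      /** Skip exactly n bytes or throw; InputStream.skip may return short. */
      static void skipFully(FileInputStream in, long n) throws IOException {
        while (n > 0) {
          long skipped = in.skip(n);
          if (skipped <= 0) {
            throw new IOException("premature end of stream, " + n + " bytes left");
          }
          n -= skipped;
        }
      }

      public static void main(String[] args) throws IOException {
        FileInputStream in = new FileInputStream("/tmp/map-output.example");
        try {
          skipFully(in, 1024);  // e.g. info.startOffset above
        } finally {
          in.close();
        }
      }
    }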
Modified: hadoop/common/branches/branch-0.20-security-patches/src/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/Makefile.am?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/Makefile.am (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/Makefile.am Fri Mar  4 04:43:43 2011
@@ -33,6 +33,7 @@ export PLATFORM = $(shell echo $$OS_NAME
 
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
+              -Isrc/org/apache/hadoop/io/nativeio \
               -Isrc/org/apache/hadoop/security
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
@@ -42,7 +43,10 @@ libhadoop_la_SOURCES = src/org/apache/ha
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
                        src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+                       src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
+                       src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+                       src/org/apache/hadoop/io/nativeio/errno_enum.c \
+                       src/org/apache/hadoop/io/nativeio/NativeIO.c
 libhadoop_la_LDFLAGS = -version-info 1:0:0
 libhadoop_la_LIBADD = -ldl -ljvm
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/native/config.h.in
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/config.h.in?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/config.h.in (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/config.h.in Fri Mar  4 04:43:43 2011
@@ -3,6 +3,10 @@
 /* The 'actual' dynamic-library for '-lz' */
 #undef HADOOP_ZLIB_LIBRARY
 
+/* Define to 1 if you have the declaration of `strerror_r', and to 0 if you
+   don't. */
+#undef HAVE_DECL_STRERROR_R
+
 /* Define to 1 if you have the <dlfcn.h> header file. */
 #undef HAVE_DLFCN_H
 
@@ -39,6 +43,9 @@
 /* Define to 1 if you have the <stdlib.h> header file. */
 #undef HAVE_STDLIB_H
 
+/* Define to 1 if you have the `strerror_r' function. */
+#undef HAVE_STRERROR_R
+
 /* Define to 1 if you have the <strings.h> header file. */
 #undef HAVE_STRINGS_H
 
@@ -81,6 +88,9 @@
 /* Define to 1 if you have the ANSI C header files. */
 #undef STDC_HEADERS
 
+/* Define to 1 if strerror_r returns char *. */
+#undef STRERROR_R_CHAR_P
+
 /* Version number of package */
 #undef VERSION
 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/native/configure.ac
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/configure.ac?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/configure.ac (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/configure.ac Fri Mar  4 04:43:43 2011
@@ -95,6 +95,9 @@ AC_C_CONST
 # Checks for library functions.
 AC_CHECK_FUNCS([memset])
 
+# Check for nonstandard STRERROR_R
+AC_FUNC_STRERROR_R
+
 AC_CONFIG_FILES([Makefile])
 AC_OUTPUT
 

Added: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Fri Mar  4 04:43:43 2011
@@ -0,0 +1,273 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <grp.h>
+#include <jni.h>
+#include <pwd.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+#include "file_descriptor.h"
+#include "errno_enum.h"
+
+// the NativeIO$Stat inner class and its constructor
+static jclass stat_clazz;
+static jmethodID stat_ctor;
+
+// the NativeIOException class and its constructor
+static jclass nioe_clazz;
+static jmethodID nioe_ctor;
+
+// Internal functions
+static void throw_ioe(JNIEnv* env, int errnum);
+static ssize_t get_pw_buflen();
+
+
+static void stat_init(JNIEnv *env) {
+  // Init Stat
+  jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
+  PASS_EXCEPTIONS(env);
+  stat_clazz = (*env)->NewGlobalRef(env, clazz);
+  stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
+    "(Ljava/lang/String;Ljava/lang/String;I)V");
+}
+
+static void stat_deinit(JNIEnv *env) {
+  if (stat_clazz != NULL) {  
+    (*env)->DeleteGlobalRef(env, stat_clazz);
+    stat_clazz = NULL;
+  }
+}
+
+static void nioe_init(JNIEnv *env) {
+  // Init NativeIOException
+  nioe_clazz = (*env)->FindClass(
+    env, "org/apache/hadoop/io/nativeio/NativeIOException");
+  PASS_EXCEPTIONS(env);
+
+  nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
+  nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
+    "(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
+}
+
+static void nioe_deinit(JNIEnv *env) {
+  if (nioe_clazz != NULL) {
+    (*env)->DeleteGlobalRef(env, nioe_clazz);
+    nioe_clazz = NULL;
+  }
+  nioe_ctor = NULL;
+}
+
+/*
+ * private static native void initNative();
+ *
+ * We rely on this function rather than lazy initialization because
+ * the lazy approach may have a race if multiple callers try to
+ * init at the same time.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
+	JNIEnv *env, jclass clazz) {
+
+  stat_init(env);
+  PASS_EXCEPTIONS_GOTO(env, error);
+  nioe_init(env);
+  PASS_EXCEPTIONS_GOTO(env, error);
+  fd_init(env);
+  PASS_EXCEPTIONS_GOTO(env, error);
+  errno_enum_init(env);
+  PASS_EXCEPTIONS_GOTO(env, error);
+  return;
+error:
+  // these are all idempotent and safe to call even if the
+  // class wasn't initialized yet
+  stat_deinit(env);
+  nioe_deinit(env);
+  fd_deinit(env);
+  errno_enum_deinit(env);
+}
+
+/*
+ * public static native Stat fstat(FileDescriptor fd);
+ */
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+  JNIEnv *env, jclass clazz, jobject fd_object)
+{
+  jobject ret = NULL;
+  char *pw_buf = NULL;
+
+  int fd = fd_get(env, fd_object);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+  struct stat s;
+  int rc = fstat(fd, &s);
+  if (rc != 0) {
+    throw_ioe(env, errno);
+    goto cleanup;
+  }
+
+  size_t pw_buflen = get_pw_buflen();
+  if ((pw_buf = malloc(pw_buflen)) == NULL) {
+    THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+    goto cleanup;
+  }
+
+  // Grab username
+  struct passwd pwd, *pwdp;
+  while ((rc = getpwuid_r(s.st_uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (pwdp == NULL) {
+    // getpwuid_r succeeded but found no passwd entry for this uid;
+    // asserting here would abort the JVM, so throw instead
+    throw_ioe(env, ENOENT);
+    goto cleanup;
+  }
+
+  jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+  if (jstr_username == NULL) goto cleanup;
+
+  // Grab group
+  struct group grp, *grpp;
+  while ((rc = getgrgid_r(s.st_gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
+    if (rc != ERANGE) {
+      throw_ioe(env, rc);
+      goto cleanup;
+    }
+    free(pw_buf);
+    pw_buflen *= 2;
+    if ((pw_buf = malloc(pw_buflen)) == NULL) {
+      THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+      goto cleanup;
+    }
+  }
+  if (grpp == NULL) {
+    // getgrgid_r succeeded but found no group entry for this gid
+    throw_ioe(env, ENOENT);
+    goto cleanup;
+  }
+
+  jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+  PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+  // Construct result
+  ret = (*env)->NewObject(env, stat_clazz, stat_ctor,
+    jstr_username, jstr_groupname, s.st_mode);
+
+cleanup:
+  if (pw_buf != NULL) free(pw_buf);
+  return ret;
+}
+
+
+/*
+ * public static native FileDescriptor open(String path, int flags, int mode);
+ */
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+  JNIEnv *env, jclass clazz, jstring j_path,
+  jint flags, jint mode)
+{
+  jobject ret = NULL;
+
+  const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
+  if (path == NULL) goto cleanup; // JVM throws Exception for us
+
+  int fd;  
+  if (flags & O_CREAT) {
+    fd = open(path, flags, mode);
+  } else {
+    fd = open(path, flags);
+  }
+
+  if (fd == -1) {
+    throw_ioe(env, errno);
+    goto cleanup;
+  }
+
+  ret = fd_create(env, fd);
+
+cleanup:
+  if (path != NULL) {
+    (*env)->ReleaseStringUTFChars(env, j_path, path);
+  }
+  return ret;
+}
+
+/*
+ * Throw a NativeIOException, which subclasses java.io.IOException,
+ * generating the message from errno.
+ */
+static void throw_ioe(JNIEnv* env, int errnum)
+{
+  const char* message;
+  char buffer[80];
+  jstring jstr_message;
+
+  buffer[0] = 0;
+#ifdef STRERROR_R_CHAR_P
+  // GNU strerror_r
+  message = strerror_r(errnum, buffer, sizeof(buffer));
+  assert (message != NULL);
+#else
+  int ret = strerror_r(errnum, buffer, sizeof(buffer));
+  if (ret == 0) {
+    message = buffer;
+  } else {
+    message = "Unknown error";
+  }
+#endif
+  jobject errno_obj = errno_to_enum(env, errnum);
+
+  if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL)
+    goto err;
+
+  jthrowable obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
+    jstr_message, errno_obj);
+  if (obj == NULL) goto err;
+
+  (*env)->Throw(env, obj);
+  return;
+
+err:
+  // ReleaseStringUTFChars pairs with GetStringUTFChars, not NewStringUTF;
+  // just drop the local ref if one was created before the failure
+  if (jstr_message != NULL)
+    (*env)->DeleteLocalRef(env, jstr_message);
+}
+
+
+/*
+ * Determine how big a buffer we need for reentrant getpwuid_r and getgrgid_r
+ */
+static ssize_t get_pw_buflen() {
+  long ret = 0;
+  #ifdef _SC_GETPW_R_SIZE_MAX
+  // sysconf may return -1 when there is no limit; fall through to the floor below
+  ret = sysconf(_SC_GETPW_R_SIZE_MAX);
+  #endif
+  return (ret > 512) ? ret : 512;
+}
+/**
+ * vim: sw=2: ts=2: et:
+ */
+
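
For reference, the JNI method names and type descriptors above pin down the shape of the Java-side counterpart. A minimal sketch of what NativeIO.java presumably looks like, under those assumptions only (the actual file added by this commit may differ, e.g. in how it guards native loading and in the constants it defines):

    package org.apache.hadoop.io.nativeio;

    import java.io.FileDescriptor;
    import java.io.IOException;

    public class NativeIO {
      // O_* flag constants omitted from this sketch (see the note after TestNativeIO)
      public static native FileDescriptor open(String path, int flags, int mode)
        throws IOException;
      public static native Stat fstat(FileDescriptor fd) throws IOException;
      private static native void initNative();

      static {
        // runs once under class-initialization locking, which is why initNative
        // can avoid the lazy-init race described in the comment above;
        // the real class presumably also checks NativeCodeLoader first
        initNative();
      }

      public static class Stat {
        // S_IFMT/S_IFREG mode-mask constants omitted from this sketch
        private final String owner, group;
        private final int mode;

        // matches the JNI constructor descriptor
        // "(Ljava/lang/String;Ljava/lang/String;I)V"
        Stat(String owner, String group, int mode) {
          this.owner = owner;
          this.group = group;
          this.mode = mode;
        }
        public String getOwner() { return owner; }
        public String getGroup() { return group; }
        public int getMode() { return mode; }
      }
    }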

Added: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c Fri Mar  4 04:43:43 2011
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <assert.h>
+#include <errno.h>
+#include <jni.h>
+
+#include "org_apache_hadoop.h"
+
+typedef struct errno_mapping {
+  int errno_val;
+  char *errno_str;
+} errno_mapping_t;
+
+#define MAPPING(x) {x, #x}
+static errno_mapping_t ERRNO_MAPPINGS[] = {
+  MAPPING(EPERM),
+  MAPPING(ENOENT),
+  MAPPING(ESRCH),
+  MAPPING(EINTR),
+  MAPPING(EIO),
+  MAPPING(ENXIO),
+  MAPPING(E2BIG),
+  MAPPING(ENOEXEC),
+  MAPPING(EBADF),
+  MAPPING(ECHILD),
+  MAPPING(EAGAIN),
+  MAPPING(ENOMEM),
+  MAPPING(EACCES),
+  MAPPING(EFAULT),
+  MAPPING(ENOTBLK),
+  MAPPING(EBUSY),
+  MAPPING(EEXIST),
+  MAPPING(EXDEV),
+  MAPPING(ENODEV),
+  MAPPING(ENOTDIR),
+  MAPPING(EISDIR),
+  MAPPING(EINVAL),
+  MAPPING(ENFILE),
+  MAPPING(EMFILE),
+  MAPPING(ENOTTY),
+  MAPPING(ETXTBSY),
+  MAPPING(EFBIG),
+  MAPPING(ENOSPC),
+  MAPPING(ESPIPE),
+  MAPPING(EROFS),
+  MAPPING(EMLINK),
+  MAPPING(EPIPE),
+  MAPPING(EDOM),
+  MAPPING(ERANGE),
+  {-1, NULL}
+};
+
+static jclass enum_class;
+static jmethodID enum_valueOf;
+static jclass errno_class;
+
+void errno_enum_init(JNIEnv *env) {
+  if (enum_class != NULL) return;
+
+  enum_class = (*env)->FindClass(env, "java/lang/Enum");
+  PASS_EXCEPTIONS(env);
+  enum_class = (*env)->NewGlobalRef(env, enum_class);
+  enum_valueOf = (*env)->GetStaticMethodID(env, enum_class,
+    "valueOf", "(Ljava/lang/Class;Ljava/lang/String;)Ljava/lang/Enum;");
+  PASS_EXCEPTIONS(env);
+
+  errno_class = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/Errno");
+  PASS_EXCEPTIONS(env);
+  errno_class = (*env)->NewGlobalRef(env, errno_class);
+}
+
+void errno_enum_deinit(JNIEnv *env) {
+  if (enum_class != NULL) {
+    (*env)->DeleteGlobalRef(env, enum_class);
+    enum_class = NULL;
+  }
+  if (errno_class != NULL) {
+    (*env)->DeleteGlobalRef(env, errno_class);
+    errno_class = NULL;
+  }
+  enum_valueOf = NULL;
+}
+
+
+static char *errno_to_string(int errnum) {
+  int i;
+  for (i = 0; ERRNO_MAPPINGS[i].errno_str != NULL; i++) {
+    if (ERRNO_MAPPINGS[i].errno_val == errnum)
+      return ERRNO_MAPPINGS[i].errno_str;
+  }
+  return "UNKNOWN";
+}
+
+jobject errno_to_enum(JNIEnv *env, int errnum) {
+  char *str = errno_to_string(errnum);
+  assert(str != NULL);
+
+  jstring jstr = (*env)->NewStringUTF(env, str);
+  PASS_EXCEPTIONS_RET(env, NULL);
+
+  return (*env)->CallStaticObjectMethod(
+    env, enum_class, enum_valueOf, errno_class, jstr);
+}
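
errno_to_enum resolves constants by name through Enum.valueOf, so each MAPPING(...) entry above must correspond to an identically named constant on the Java enum, with UNKNOWN covering errno_to_string's fallback. A sketch of the two Java types this native code assumes (shown together here for brevity; each would live in its own source file, and the real ones added by this commit may differ):

    package org.apache.hadoop.io.nativeio;

    import java.io.IOException;

    // one constant per MAPPING(...) entry, plus UNKNOWN for unmapped values
    enum Errno {
      EPERM, ENOENT, ESRCH, EINTR, EIO, ENXIO, E2BIG, ENOEXEC, EBADF,
      ECHILD, EAGAIN, ENOMEM, EACCES, EFAULT, ENOTBLK, EBUSY, EEXIST,
      EXDEV, ENODEV, ENOTDIR, EISDIR, EINVAL, ENFILE, EMFILE, ENOTTY,
      ETXTBSY, EFBIG, ENOSPC, ESPIPE, EROFS, EMLINK, EPIPE, EDOM, ERANGE,
      UNKNOWN
    }

    // matches the constructor descriptor nioe_init looks up in NativeIO.c:
    // "(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V"
    class NativeIOException extends IOException {
      private final Errno errno;

      NativeIOException(String message, Errno errno) {
        super(message);
        this.errno = errno;
      }

      Errno getErrno() { return errno; }
    }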

Added: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h Fri Mar  4 04:43:43 2011
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ERRNO_ENUM_H
+#define ERRNO_ENUM_H
+
+#include <jni.h>
+
+void errno_enum_init(JNIEnv *env);
+void errno_enum_deinit(JNIEnv *env);
+jobject errno_to_enum(JNIEnv *env, int errnum);
+
+#endif

Added: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Fri Mar  4 04:43:43 2011
@@ -0,0 +1,69 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements.  See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+ 
+#include <jni.h>
+#include "file_descriptor.h"
+#include "org_apache_hadoop.h"
+
+// class of java.io.FileDescriptor
+static jclass fd_class;
+// the internal field for the integer fd
+static jfieldID fd_descriptor;
+// the no-argument constructor
+static jmethodID fd_constructor;
+
+
+void fd_init(JNIEnv* env)
+{
+  if (fd_class != NULL) return; // already initialized
+
+  fd_class = (*env)->FindClass(env, "java/io/FileDescriptor");
+  PASS_EXCEPTIONS(env);
+  fd_class = (*env)->NewGlobalRef(env, fd_class);
+
+  fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
+  PASS_EXCEPTIONS(env);
+  fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
+}
+
+void fd_deinit(JNIEnv *env) {
+  if (fd_class != NULL) {
+    (*env)->DeleteGlobalRef(env, fd_class);
+    fd_class = NULL;
+  }
+  fd_descriptor = NULL;
+  fd_constructor = NULL;
+}
+
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying fd, or throw if unavailable
+ */
+int fd_get(JNIEnv* env, jobject obj) {
+  return (*env)->GetIntField(env, obj, fd_descriptor);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given int fd
+ */
+jobject fd_create(JNIEnv *env, int fd) {
+  jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+  PASS_EXCEPTIONS_RET(env, NULL);
+
+  (*env)->SetIntField(env, obj, fd_descriptor, fd);
+  return obj;
+} 
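
fd_get and fd_create depend on java.io.FileDescriptor keeping its OS-level descriptor in a private int field named "fd". That holds on Sun/OpenJDK but is not a documented guarantee; a quick reflective check of the assumption (illustrative only, JDK-specific):

    import java.io.FileDescriptor;
    import java.lang.reflect.Field;

    public class FdFieldCheck {
      public static void main(String[] args) throws Exception {
        // the same field the native code resolves with GetFieldID(fd_class, "fd", "I")
        Field fd = FileDescriptor.class.getDeclaredField("fd");
        fd.setAccessible(true);
        // FileDescriptor.out wraps stdout, so this typically prints 1
        System.out.println("stdout fd = " + fd.getInt(FileDescriptor.out));
      }
    }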

Added: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Fri Mar  4 04:43:43 2011
@@ -0,0 +1,28 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one or more
+ *  contributor license agreements.  See the NOTICE file distributed with
+ *  this work for additional information regarding copyright ownership.
+ *  The ASF licenses this file to You under the Apache License, Version 2.0
+ *  (the "License"); you may not use this file except in compliance with
+ *  the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+#ifndef FILE_DESCRIPTOR_H
+#define FILE_DESCRIPTOR_H
+
+#include <jni.h>
+
+void fd_init(JNIEnv *env);
+void fd_deinit(JNIEnv *env);
+
+int fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, int fd);
+
+#endif

Modified: hadoop/common/branches/branch-0.20-security-patches/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/native/src/org_apache_hadoop.h?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/native/src/org_apache_hadoop.h Fri Mar  4 04:43:43 2011
@@ -50,6 +50,22 @@
 	} \
   }
 
+/* Helper macro to return if an exception is pending */
+#define PASS_EXCEPTIONS(env) \
+  { \
+    if ((*env)->ExceptionCheck(env)) return; \
+  }
+
+#define PASS_EXCEPTIONS_GOTO(env, target) \
+  { \
+    if ((*env)->ExceptionCheck(env)) goto target; \
+  }
+
+#define PASS_EXCEPTIONS_RET(env, ret) \
+  { \
+    if ((*env)->ExceptionCheck(env)) return (ret); \
+  }
+
 /** 
  * A helper function to dlsym a 'symbol' from a given library-handle. 
  * 

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/TestSecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/TestSecureIOUtils.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/TestSecureIOUtils.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/TestSecureIOUtils.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.io.nativeio.NativeIO;
+
+import org.junit.BeforeClass;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assume.*;
+import static org.junit.Assert.*;
+import java.io.IOException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+
+public class TestSecureIOUtils {
+  private static String realOwner, realGroup; 
+  private static final File testFilePath =
+      new File(System.getProperty("test.build.data"), "TestSecureIOContext");
+
+  @BeforeClass
+  public static void makeTestFile() throws Exception {
+    FileOutputStream fos = new FileOutputStream(testFilePath);
+    fos.write("hello".getBytes("UTF-8"));
+    fos.close();
+
+    Configuration conf = new Configuration();
+    FileSystem rawFS = FileSystem.getLocal(conf).getRaw();
+    FileStatus stat = rawFS.getFileStatus(
+      new Path(testFilePath.toString()));
+    realOwner = stat.getOwner();
+    realGroup = stat.getGroup();
+  }
+
+  @Test
+  public void testReadUnrestricted() throws IOException {
+    SecureIOUtils.openForRead(testFilePath, null, null).close();
+  }
+
+  @Test
+  public void testReadCorrectlyRestrictedWithSecurity() throws IOException {
+    SecureIOUtils
+      .openForRead(testFilePath, realOwner, realGroup).close();
+  }
+
+  @Test
+  public void testReadIncorrectlyRestrictedWithSecurity() throws IOException {
+    try {
+      SecureIOUtils
+        .openForRead(testFilePath, "invalidUser", null).close();
+      fail("Didn't throw expection for wrong ownership!");
+    } catch (IOException ioe) {
+      // expected
+    }
+  }
+
+  @Test
+  public void testCreateForWrite() throws IOException {
+    try {
+      SecureIOUtils.createForWrite(testFilePath, 0777);
+      fail("Was able to create file at " + testFilePath);
+    } catch (SecureIOUtils.AlreadyExistsException aee) {
+      // expected
+    }
+  }
+}
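
These tests exercise openForRead(File, expectedOwner, expectedGroup). The reason the check routes through NativeIO.fstat is the symlink race this commit fixes: ownership is verified on the already-open descriptor rather than on the path, so nothing swapped in between check and open can bypass it. A minimal sketch of that pattern, assuming the NativeIO API above (the real SecureIOUtils.java added by this commit may differ, e.g. it also verifies the group and presumably falls back when native code is unavailable):

    // open first, then fstat the open fd: no check-then-open window to race
    public static FileInputStream openForRead(File f, String expectedOwner,
        String expectedGroup) throws IOException {
      FileInputStream fis = new FileInputStream(f);
      if (expectedOwner == null) {
        return fis;  // caller requested no ownership restriction
      }
      boolean success = false;
      try {
        NativeIO.Stat stat = NativeIO.fstat(fis.getFD());
        if (!expectedOwner.equals(stat.getOwner())) {
          throw new IOException("Owner '" + stat.getOwner() + "' of " + f
              + " did not match expected owner '" + expectedOwner + "'");
        }
        success = true;
        return fis;
      } finally {
        if (!success) {
          fis.close();  // don't leak the fd when the ownership check fails
        }
      }
    }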

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1077680&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java Fri Mar  4 04:43:43 2011
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assume.*;
+import static org.junit.Assert.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.NativeCodeLoader;
+
+public class TestNativeIO {
+  static final Log LOG = LogFactory.getLog(TestNativeIO.class);
+
+  static final File TEST_DIR = new File(
+    System.getProperty("test.build.data"), "testnativeio");
+
+  @Before
+  public void checkLoaded() {
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+  }
+
+  @Before
+  public void setupTestDir() throws IOException {
+    FileUtil.fullyDelete(TEST_DIR);
+    TEST_DIR.mkdirs();
+  }
+
+  @Test
+  public void testFstat() throws Exception {
+    FileOutputStream fos = new FileOutputStream(
+      new File(TEST_DIR, "testfstat"));
+    NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+    fos.close();
+    LOG.info("Stat: " + String.valueOf(stat));
+
+    assertEquals(System.getProperty("user.name"), stat.getOwner());
+    assertNotNull(stat.getGroup());
+    assertTrue(!"".equals(stat.getGroup()));
+    assertEquals(NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+  }
+
+  @Test
+  public void testFstatClosedFd() throws Exception {
+    FileOutputStream fos = new FileOutputStream(
+      new File(TEST_DIR, "testfstat2"));
+    fos.close();
+    try {
+      NativeIO.fstat(fos.getFD());
+      fail("fstat did not throw on a closed FileDescriptor");
+    } catch (IOException e) {
+      LOG.info("Got expected exception", e);
+    }
+  }
+
+  @Test
+  public void testOpen() throws Exception {
+    LOG.info("Open a missing file without O_CREAT and it should fail");
+    try {
+      FileDescriptor fd = NativeIO.open(
+        new File(TEST_DIR, "doesntexist").getAbsolutePath(),
+        NativeIO.O_WRONLY, 0700);
+      fail("Able to open a new file without O_CREAT");
+    } catch (IOException ioe) {
+      // expected
+    }
+
+    LOG.info("Test creating a file with O_CREAT");
+    FileDescriptor fd = NativeIO.open(
+      new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
+      NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+    assertNotNull(fd);
+    assertTrue(fd.valid());
+    FileOutputStream fos = new FileOutputStream(fd);
+    fos.write("foo".getBytes());
+    fos.close();
+
+    assertFalse(fd.valid());
+
+    LOG.info("Test exclusive create");
+    try {
+      fd = NativeIO.open(
+        new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
+        NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
+      fail("Was able to create existing file with O_EXCL");
+    } catch (IOException ioe) {
+      // expected
+    }
+  }
+
+  /**
+   * Test that opens and closes a file 10000 times - this would crash with
+   * "Too many open files" if we leaked fds using this access pattern.
+   */
+  @Test
+  public void testFDDoesntLeak() throws IOException {
+    for (int i = 0; i < 10000; i++) {
+      FileDescriptor fd = NativeIO.open(
+        new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
+        NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+      assertNotNull(fd);
+      assertTrue(fd.valid());
+      FileOutputStream fos = new FileOutputStream(fd);
+      fos.write("foo".getBytes());
+      fos.close();
+    }
+  }
+
+}
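
testOpen above assumes NativeIO exposes the open(2) flag bits as Java constants. On Linux these take the <fcntl.h> values sketched below; the values are platform-specific, so treat this as an illustration of the shape rather than the committed definitions:

    // Linux <fcntl.h> values (octal); other platforms differ
    public static final int O_RDONLY = 00;
    public static final int O_WRONLY = 01;
    public static final int O_RDWR   = 02;
    public static final int O_CREAT  = 0100;
    public static final int O_EXCL   = 0200;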

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIndexCache.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIndexCache.java?rev=1077680&r1=1077679&r2=1077680&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIndexCache.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestIndexCache.java Fri Mar  4 04:43:43 2011
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.security.UserGroupInformation;
 
 import junit.framework.TestCase;
 
@@ -55,7 +56,8 @@ public class TestIndexCache extends Test
       Path f = new Path(p, Integer.toString(totalsize, 36));
       writeFile(fs, f, totalsize, partsPerMap);
       IndexRecord rec = cache.getIndexInformation(
-          Integer.toString(totalsize, 36), r.nextInt(partsPerMap), f);
+          Integer.toString(totalsize, 36), r.nextInt(partsPerMap), f,
+          UserGroupInformation.getCurrentUser().getShortUserName());
       checkRecord(rec, totalsize);
     }
 
@@ -66,7 +68,8 @@ public class TestIndexCache extends Test
     for (int i = bytesPerFile; i < 1024 * 1024; i += bytesPerFile) {
       Path f = new Path(p, Integer.toString(i, 36));
       IndexRecord rec = cache.getIndexInformation(Integer.toString(i, 36),
-          r.nextInt(partsPerMap), f);
+          r.nextInt(partsPerMap), f,
+          UserGroupInformation.getCurrentUser().getShortUserName());
       checkRecord(rec, i);
     }
 
@@ -74,14 +77,16 @@ public class TestIndexCache extends Test
     Path f = new Path(p, Integer.toString(totalsize, 36));
     writeFile(fs, f, totalsize, partsPerMap);
     cache.getIndexInformation(Integer.toString(totalsize, 36),
-        r.nextInt(partsPerMap), f);
+        r.nextInt(partsPerMap), f,
+        UserGroupInformation.getCurrentUser().getShortUserName());
     fs.delete(f, false);
 
     // oldest fails to read, or error
     boolean fnf = false;
     try {
       cache.getIndexInformation(Integer.toString(bytesPerFile, 36),
-          r.nextInt(partsPerMap), new Path(p, Integer.toString(bytesPerFile)));
+          r.nextInt(partsPerMap), new Path(p, Integer.toString(bytesPerFile)),
+          UserGroupInformation.getCurrentUser().getShortUserName());
     } catch (IOException e) {
       if (e.getCause() == null ||
           !(e.getCause()  instanceof FileNotFoundException)) {
@@ -96,11 +101,13 @@ public class TestIndexCache extends Test
     // should find all the other entries
     for (int i = bytesPerFile << 1; i < 1024 * 1024; i += bytesPerFile) {
       IndexRecord rec = cache.getIndexInformation(Integer.toString(i, 36),
-          r.nextInt(partsPerMap), new Path(p, Integer.toString(i, 36)));
+          r.nextInt(partsPerMap), new Path(p, Integer.toString(i, 36)),
+          UserGroupInformation.getCurrentUser().getShortUserName());
       checkRecord(rec, i);
     }
     IndexRecord rec = cache.getIndexInformation(Integer.toString(totalsize, 36),
-        r.nextInt(partsPerMap), f);
+        r.nextInt(partsPerMap), f,
+        UserGroupInformation.getCurrentUser().getShortUserName());
     checkRecord(rec, totalsize);
   }
 
@@ -130,7 +137,8 @@ public class TestIndexCache extends Test
     out.writeLong(iout.getChecksum().getValue());
     dout.close();
     try {
-      cache.getIndexInformation("badindex", 7, f);
+      cache.getIndexInformation("badindex", 7, f, 
+          UserGroupInformation.getCurrentUser().getShortUserName());
       fail("Did not detect bad checksum");
     } catch (IOException e) {
       if (!(e.getCause() instanceof ChecksumException)) {