Posted to common-commits@hadoop.apache.org by dd...@apache.org on 2010/12/01 09:03:59 UTC

svn commit: r1040883 [1/3] - in /hadoop/common/trunk: ./ src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/io/nativeio/ src/native/ src/native/lib/ src/native/src/ src/native/src/org/apache/hadoop/io/nativeio/ src/test/core/org/apache/hadoop/io...

Author: ddas
Date: Wed Dec  1 08:03:58 2010
New Revision: 1040883

URL: http://svn.apache.org/viewvc?rev=1040883&view=rev
Log:
HADOOP-6978. Adds support for NativeIO using JNI. Contributed by Todd Lipcon, Devaraj Das & Owen O'Malley.

Added:
    hadoop/common/trunk/src/java/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/
    hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/Errno.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIOException.java
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
    hadoop/common/trunk/src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java
    hadoop/common/trunk/src/test/core/org/apache/hadoop/io/nativeio/
    hadoop/common/trunk/src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java
Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/build.xml
    hadoop/common/trunk/src/native/Makefile.am
    hadoop/common/trunk/src/native/Makefile.in
    hadoop/common/trunk/src/native/config.h.in
    hadoop/common/trunk/src/native/configure
    hadoop/common/trunk/src/native/configure.ac
    hadoop/common/trunk/src/native/lib/Makefile.am
    hadoop/common/trunk/src/native/src/org_apache_hadoop.h

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1040883&r1=1040882&r2=1040883&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Wed Dec  1 08:03:58 2010
@@ -75,6 +75,9 @@ Release 0.22.0 - Unreleased
     HADOOP-7013. Add boolean field isCorrupt to BlockLocation. 
     (Patrick Kling via hairong)
 
+    HADOOP-6978. Adds support for NativeIO using JNI. 
+    (Todd Lipcon, Devaraj Das & Owen O'Malley via ddas)
+
   IMPROVEMENTS
 
     HADOOP-6644. util.Shell getGROUPS_FOR_USER_COMMAND method name 

Modified: hadoop/common/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/build.xml?rev=1040883&r1=1040882&r2=1040883&view=diff
==============================================================================
--- hadoop/common/trunk/build.xml (original)
+++ hadoop/common/trunk/build.xml Wed Dec  1 08:03:58 2010
@@ -366,6 +366,7 @@
   	
     <mkdir dir="${build.native}/lib"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
+    <mkdir dir="${build.native}/src/org/apache/hadoop/io/nativeio"/>
     <mkdir dir="${build.native}/src/org/apache/hadoop/security"/>
 
   	<javah 
@@ -386,6 +387,14 @@
   	  >
   	  <class name="org.apache.hadoop.security.JniBasedUnixGroupsMapping" />
   	</javah>
+  	<javah
+  	  classpath="${build.classes}"
+  	  destdir="${build.native}/src/org/apache/hadoop/io/nativeio"
+      force="yes"
+  	  verbose="yes"
+  	  >
+  	  <class name="org.apache.hadoop.io.nativeio.NativeIO" />
+  	</javah>
 
 	<exec dir="${build.native}" executable="sh" failonerror="true">
 	  <env key="OS_NAME" value="${os.name}"/>

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1040883&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/SecureIOUtils.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/SecureIOUtils.java Wed Dec  1 08:03:58 2010
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.nativeio.Errno;
+import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.io.nativeio.NativeIOException;
+import org.apache.hadoop.io.nativeio.NativeIO.Stat;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * This class provides secure APIs for opening and creating files on the local
+ * disk. The main issue this class tries to handle is that of symlink traversal.
+ * <br/>
+ * An example of such an attack is:
+ * <ol>
+ * <li> A malicious user removes his task's syslog file and replaces it with a
+ * symlink to the jobToken file of a target user.</li>
+ * <li> The malicious user then tries to open the syslog file via the servlet
+ * on the tasktracker.</li>
+ * <li> The tasktracker is unaware of the symlink, and simply streams the contents
+ * of the jobToken file. The malicious user can now access potentially sensitive
+ * map outputs, etc. of the target user's job.</li>
+ * </ol>
+ * A similar attack is possible with task log truncation, but in that case it
+ * exploits an insecure write to a file rather than an insecure read.
+ * <br/>
+ */
+public class SecureIOUtils {
+
+  /**
+   * Ensure that we are set up to run with the appropriate native support code.
+   * If security is disabled and the support code is unavailable, this class
+   * still tries its best to be secure, but remains vulnerable to some race
+   * condition attacks.
+   *
+   * If security is enabled but the support code is unavailable, throws a
+   * RuntimeException since we don't want to run insecurely.
+   */
+  static {
+    boolean shouldBeSecure = UserGroupInformation.isSecurityEnabled();
+    boolean canBeSecure = NativeIO.isAvailable();
+
+    if (!canBeSecure && shouldBeSecure) {
+      throw new RuntimeException(
+        "Secure IO is not possible without native code extensions.");
+    }
+
+    // Pre-cache an instance of the raw FileSystem since we sometimes
+    // do secure IO in a shutdown hook, where this call could fail.
+    try {
+      rawFilesystem = FileSystem.getLocal(new Configuration()).getRaw();
+    } catch (IOException ie) {
+      throw new RuntimeException(
+      "Couldn't obtain an instance of RawLocalFileSystem.");
+    }
+
+    // SecureIO just skips security checks in the case that security is
+    // disabled
+    skipSecurity = !canBeSecure;
+  }
+
+  private final static boolean skipSecurity;
+  private final static FileSystem rawFilesystem;
+
+  /**
+   * Open the given File for read access, verifying the expected user/group
+   * constraints.
+   * @param f the file that we are trying to open
+   * @param expectedOwner the expected user owner for the file
+   * @param expectedGroup the expected group owner for the file
+   * @throws IOException if an IO error occurred, or if the owner/group does
+   * not match
+   */
+  public static FileInputStream openForRead(File f, String expectedOwner, 
+      String expectedGroup) throws IOException {
+    if (skipSecurity) {
+      // Subject to race conditions but this is the best we can do
+      FileStatus status =
+        rawFilesystem.getFileStatus(new Path(f.getAbsolutePath()));
+      checkStat(f, status.getOwner(), status.getGroup(),
+          expectedOwner, expectedGroup);
+      return new FileInputStream(f);
+    }
+
+    FileInputStream fis = new FileInputStream(f);
+    boolean success = false;
+    try {
+      Stat stat = NativeIO.fstat(fis.getFD());
+      checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
+          expectedGroup);
+      success = true;
+      return fis;
+    } finally {
+      if (!success) {
+        fis.close();
+      }
+    }
+  }
+
+  private static FileOutputStream insecureCreateForWrite(File f,
+      int permissions) throws IOException {
+    // If we can't do real security, do a racy exists check followed by an
+    // open and chmod
+    if (f.exists()) {
+      throw new AlreadyExistsException("File " + f + " already exists");
+    }
+    FileOutputStream fos = new FileOutputStream(f);
+    boolean success = false;
+    try {
+      rawFilesystem.setPermission(new Path(f.getAbsolutePath()),
+        new FsPermission((short)permissions));
+      success = true;
+      return fos;
+    } finally {
+      if (!success) {
+        fos.close();
+      }
+    }
+  }
+
+  /**
+   * Open the specified File for write access, ensuring that it does not exist.
+   * @param f the file that we want to create
+   * @param permissions the permissions we want on the file (if security is enabled)
+   *
+   * @throws AlreadyExistsException if the file already exists
+   * @throws IOException if any other error occurred
+   */
+  public static FileOutputStream createForWrite(File f, int permissions)
+  throws IOException {
+    if (skipSecurity) {
+      return insecureCreateForWrite(f, permissions);
+    } else {
+      // Use the native wrapper around open(2)
+      try {
+        FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
+          NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
+          permissions);
+        return new FileOutputStream(fd);
+      } catch (NativeIOException nioe) {
+        if (nioe.getErrno() == Errno.EEXIST) {
+          throw new AlreadyExistsException(nioe);
+        }
+        throw nioe;
+      }
+    }
+  }
+
+  private static void checkStat(File f, String owner, String group, 
+      String expectedOwner, 
+      String expectedGroup) throws IOException {
+    if (expectedOwner != null &&
+        !expectedOwner.equals(owner)) {
+      throw new IOException(
+        "Owner '" + owner + "' for path " + f + " did not match " +
+        "expected owner '" + expectedOwner + "'");
+    }
+    if (expectedGroup != null &&
+        !expectedGroup.equals(group)) {
+      throw new IOException(
+        "Group '" + group + "' for path " + f + " did not match " +
+        "expected group '" + expectedGroup + "'");
+    }
+  }
+
+  /**
+   * Signals that an attempt to create a file at a given pathname has failed
+   * because another file already existed at that path.
+   */
+  public static class AlreadyExistsException extends IOException {
+    private static final long serialVersionUID = 1L;
+
+    public AlreadyExistsException(String msg) {
+      super(msg);
+    }
+
+    public AlreadyExistsException(Throwable cause) {
+      super(cause);
+    }
+  }
+}
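
For reference, a minimal usage sketch of the new SecureIOUtils API (the path,
payload and example class name are hypothetical; error handling is simplified
and Hadoop is assumed to be on the classpath):

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureIOUtilsExample {
      public static void main(String[] args) throws IOException {
        File log = new File("/tmp/example-syslog");   // hypothetical path

        // Create the file, failing if anything (e.g. a planted symlink)
        // already exists at that path. 0600 = owner read/write only.
        FileOutputStream out;
        try {
          out = SecureIOUtils.createForWrite(log, 0600);
        } catch (SecureIOUtils.AlreadyExistsException aee) {
          System.err.println("Refusing to reuse existing path: " + aee);
          return;
        }
        try {
          out.write("hello\n".getBytes("UTF-8"));
        } finally {
          out.close();
        }

        // Re-open for read, verifying that the current user owns the file.
        // Passing null for the group skips the group check.
        String user = System.getProperty("user.name");
        FileInputStream in = SecureIOUtils.openForRead(log, user, null);
        try {
          System.out.println("first byte: " + in.read());
        } finally {
          in.close();
        }
      }
    }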

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/Errno.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/Errno.java?rev=1040883&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/Errno.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/Errno.java Wed Dec  1 08:03:58 2010
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+/**
+ * Enum representing POSIX errno values.
+ */
+public enum Errno {
+  EPERM,
+  ENOENT,
+  ESRCH,
+  EINTR,
+  EIO,
+  ENXIO,
+  E2BIG,
+  ENOEXEC,
+  EBADF,
+  ECHILD,
+  EAGAIN,
+  ENOMEM,
+  EACCES,
+  EFAULT,
+  ENOTBLK,
+  EBUSY,
+  EEXIST,
+  EXDEV,
+  ENODEV,
+  ENOTDIR,
+  EISDIR,
+  EINVAL,
+  ENFILE,
+  EMFILE,
+  ENOTTY,
+  ETXTBSY,
+  EFBIG,
+  ENOSPC,
+  ESPIPE,
+  EROFS,
+  EMLINK,
+  EPIPE,
+  EDOM,
+  ERANGE,
+
+  UNKNOWN;
+}

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1040883&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIO.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIO.java Wed Dec  1 08:03:58 2010
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+import org.apache.hadoop.util.NativeCodeLoader;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+/**
+ * JNI wrappers for various native IO-related calls not available in Java.
+ * These functions should generally be used alongside a fallback to another
+ * more portable mechanism.
+ */
+public class NativeIO {
+  // Flags for open() call from bits/fcntl.h
+  public static final int O_RDONLY   =    00;
+  public static final int O_WRONLY   =    01;
+  public static final int O_RDWR     =    02;
+  public static final int O_CREAT    =  0100;
+  public static final int O_EXCL     =  0200;
+  public static final int O_NOCTTY   =  0400;
+  public static final int O_TRUNC    = 01000;
+  public static final int O_APPEND   = 02000;
+  public static final int O_NONBLOCK = 04000;
+  public static final int O_SYNC   =  010000;
+  public static final int O_ASYNC  =  020000;
+  public static final int O_FSYNC = O_SYNC;
+  public static final int O_NDELAY = O_NONBLOCK;
+
+  private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+  private static boolean nativeLoaded = false;
+
+  static {
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      try {
+        initNative();
+        nativeLoaded = true;
+      } catch (Throwable t) {
+        // This can happen if the user has an older version of libhadoop.so
+        // installed - in this case we can continue without native IO after
+        // logging the error below
+        LOG.error("Unable to initialize NativeIO libraries", t);
+      }
+    }
+  }
+
+  /**
+   * Return true if the JNI-based native IO extensions are available.
+   */
+  public static boolean isAvailable() {
+    return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
+  }
+
+  /** Wrapper around open(2) */
+  public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
+  /** Wrapper around fstat(2) */
+  public static native Stat fstat(FileDescriptor fd) throws IOException;
+  /** Initialize the JNI method ID and class ID cache */
+  private static native void initNative();
+
+
+  /**
+   * Result type of the fstat call
+   */
+  public static class Stat {
+    private String owner, group;
+    private int mode;
+
+    // Mode constants
+    public static final int S_IFMT = 0170000;      /* type of file */
+    public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
+    public static final int   S_IFCHR  = 0020000;  /* character special */
+    public static final int   S_IFDIR  = 0040000;  /* directory */
+    public static final int   S_IFBLK  = 0060000;  /* block special */
+    public static final int   S_IFREG  = 0100000;  /* regular */
+    public static final int   S_IFLNK  = 0120000;  /* symbolic link */
+    public static final int   S_IFSOCK = 0140000;  /* socket */
+    public static final int   S_IFWHT  = 0160000;  /* whiteout */
+    public static final int S_ISUID = 0004000;  /* set user id on execution */
+    public static final int S_ISGID = 0002000;  /* set group id on execution */
+    public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
+    public static final int S_IRUSR = 0000400;  /* read permission, owner */
+    public static final int S_IWUSR = 0000200;  /* write permission, owner */
+    public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
+
+    Stat(String owner, String group, int mode) {
+      this.owner = owner;
+      this.group = group;
+      this.mode = mode;
+    }
+
+    public String toString() {
+      return "Stat(owner='" + owner + "', group='" + group + "'" +
+        ", mode=" + mode + ")";
+    }
+
+    public String getOwner() {
+      return owner;
+    }
+    public String getGroup() {
+      return group;
+    }
+    public int getMode() {
+      return mode;
+    }
+  }
+}
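
A minimal sketch of how the NativeIO wrappers above might be used directly
(the path and class name are hypothetical; this assumes libhadoop has been
built and loaded, otherwise isAvailable() returns false):

    import java.io.FileDescriptor;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.nativeio.NativeIO;

    public class NativeIOExample {
      public static void main(String[] args) throws IOException {
        if (!NativeIO.isAvailable()) {
          System.err.println("libhadoop not loaded; a pure-Java fallback is needed");
          return;
        }

        // O_CREAT|O_EXCL makes open(2) fail with EEXIST rather than follow a
        // pre-planted symlink, which is the point of the secure-create path.
        FileDescriptor fd = NativeIO.open("/tmp/nativeio-demo",
            NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0644);

        // fstat(2) on the already-open descriptor is immune to path races.
        NativeIO.Stat stat = NativeIO.fstat(fd);
        System.out.println("owner=" + stat.getOwner()
            + " group=" + stat.getGroup()
            + " perms=" + Integer.toOctalString(
                stat.getMode() & ~NativeIO.Stat.S_IFMT));

        new FileOutputStream(fd).close();   // release the raw descriptor
      }
    }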

Added: hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIOException.java?rev=1040883&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIOException.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/io/nativeio/NativeIOException.java Wed Dec  1 08:03:58 2010
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.IOException;
+
+/**
+ * An exception generated by a call to the native IO code.
+ *
+ * These exceptions simply wrap <i>errno</i> result codes.
+ */
+public class NativeIOException extends IOException {
+  private static final long serialVersionUID = 1L;
+
+  private Errno errno;
+
+  public NativeIOException(String msg, Errno errno) {
+    super(msg);
+    this.errno = errno;
+  }
+
+  public Errno getErrno() {
+    return errno;
+  }
+
+  public String toString() {
+    return errno.toString() + ": " + super.getMessage();
+  }
+}
+
+
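
A short sketch of the error-handling pattern SecureIOUtils.createForWrite uses
above: catch NativeIOException and branch on getErrno() for the cases you care
about (the helper name and path here are hypothetical):

    import java.io.FileDescriptor;
    import java.io.FileOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.nativeio.Errno;
    import org.apache.hadoop.io.nativeio.NativeIO;
    import org.apache.hadoop.io.nativeio.NativeIOException;

    public class ErrnoHandlingExample {
      // Hypothetical helper: open exclusively, mapping a couple of common
      // errno values to friendlier errors and rethrowing the rest.
      static FileDescriptor openExclusive(String path) throws IOException {
        try {
          return NativeIO.open(path,
              NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0600);
        } catch (NativeIOException nioe) {
          if (nioe.getErrno() == Errno.EEXIST) {
            throw new IOException(path + " already exists", nioe);
          } else if (nioe.getErrno() == Errno.EACCES) {
            throw new IOException("no permission to create " + path, nioe);
          }
          throw nioe;   // any other errno: propagate unchanged
        }
      }

      public static void main(String[] args) throws IOException {
        FileDescriptor fd = openExclusive("/tmp/errno-demo");
        new FileOutputStream(fd).close();
      }
    }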

Modified: hadoop/common/trunk/src/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/Makefile.am?rev=1040883&r1=1040882&r2=1040883&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/Makefile.am (original)
+++ hadoop/common/trunk/src/native/Makefile.am Wed Dec  1 08:03:58 2010
@@ -33,7 +33,8 @@ export PLATFORM = $(shell echo $$OS_NAME
 
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
-              -Isrc/org/apache/hadoop/security
+              -Isrc/org/apache/hadoop/security \
+              -Isrc/org/apache/hadoop/io/nativeio/
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
 
@@ -41,8 +42,12 @@ lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+                       src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+                       src/org/apache/hadoop/io/nativeio/errno_enum.c \
+                       src/org/apache/hadoop/io/nativeio/NativeIO.c
+
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm
 
 #

Modified: hadoop/common/trunk/src/native/Makefile.in
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/Makefile.in?rev=1040883&r1=1040882&r2=1040883&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/Makefile.in (original)
+++ hadoop/common/trunk/src/native/Makefile.in Wed Dec  1 08:03:58 2010
@@ -93,7 +93,8 @@ libLTLIBRARIES_INSTALL = $(INSTALL)
 LTLIBRARIES = $(lib_LTLIBRARIES)
 libhadoop_la_DEPENDENCIES =
 am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \
-	getGroup.lo JniBasedUnixGroupsMapping.lo
+	getGroup.lo JniBasedUnixGroupsMapping.lo file_descriptor.lo \
+	errno_enum.lo NativeIO.lo
 libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
 DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
 depcomp = $(SHELL) $(top_srcdir)/config/depcomp
@@ -222,7 +223,8 @@ sysconfdir = @sysconfdir@
 target_alias = @target_alias@
 AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
               -Isrc/org/apache/hadoop/io/compress/zlib \
-              -Isrc/org/apache/hadoop/security
+              -Isrc/org/apache/hadoop/security \
+              -Isrc/org/apache/hadoop/io/nativeio/
 
 AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
 AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
@@ -230,9 +232,12 @@ lib_LTLIBRARIES = libhadoop.la
 libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
                        src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
                        src/org/apache/hadoop/security/getGroup.c \
-                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+                       src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+                       src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+                       src/org/apache/hadoop/io/nativeio/errno_enum.c \
+                       src/org/apache/hadoop/io/nativeio/NativeIO.c
 
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm
 all: config.h
 	$(MAKE) $(AM_MAKEFLAGS) all-am
@@ -326,8 +331,11 @@ distclean-compile:
 	-rm -f *.tab.c
 
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/NativeIO.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/errno_enum.Plo@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/file_descriptor.Plo@am__quote@
 @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@
 
 .c.o:
@@ -379,6 +387,27 @@ JniBasedUnixGroupsMapping.lo: src/org/ap
 @AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
 @am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
 
+file_descriptor.lo: src/org/apache/hadoop/io/nativeio/file_descriptor.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT file_descriptor.lo -MD -MP -MF "$(DEPDIR)/file_descriptor.Tpo" -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/file_descriptor.Tpo" "$(DEPDIR)/file_descriptor.Plo"; else rm -f "$(DEPDIR)/file_descriptor.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/file_descriptor.c' object='file_descriptor.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c
+
+errno_enum.lo: src/org/apache/hadoop/io/nativeio/errno_enum.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT errno_enum.lo -MD -MP -MF "$(DEPDIR)/errno_enum.Tpo" -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/errno_enum.Tpo" "$(DEPDIR)/errno_enum.Plo"; else rm -f "$(DEPDIR)/errno_enum.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/errno_enum.c' object='errno_enum.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c
+
+NativeIO.lo: src/org/apache/hadoop/io/nativeio/NativeIO.c
+@am__fastdepCC_TRUE@	if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT NativeIO.lo -MD -MP -MF "$(DEPDIR)/NativeIO.Tpo" -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c; \
+@am__fastdepCC_TRUE@	then mv -f "$(DEPDIR)/NativeIO.Tpo" "$(DEPDIR)/NativeIO.Plo"; else rm -f "$(DEPDIR)/NativeIO.Tpo"; exit 1; fi
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	source='src/org/apache/hadoop/io/nativeio/NativeIO.c' object='NativeIO.lo' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@	DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@	$(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c
+
 mostlyclean-libtool:
 	-rm -f *.lo
 

Modified: hadoop/common/trunk/src/native/config.h.in
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/config.h.in?rev=1040883&r1=1040882&r2=1040883&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/config.h.in (original)
+++ hadoop/common/trunk/src/native/config.h.in Wed Dec  1 08:03:58 2010
@@ -3,6 +3,10 @@
 /* The 'actual' dynamic-library for '-lz' */
 #undef HADOOP_ZLIB_LIBRARY
 
+/* Define to 1 if you have the declaration of `strerror_r', and to 0 if you
+   don't. */
+#undef HAVE_DECL_STRERROR_R
+
 /* Define to 1 if you have the <dlfcn.h> header file. */
 #undef HAVE_DLFCN_H
 
@@ -39,6 +43,9 @@
 /* Define to 1 if you have the <stdlib.h> header file. */
 #undef HAVE_STDLIB_H
 
+/* Define to 1 if you have the `strerror_r' function. */
+#undef HAVE_STRERROR_R
+
 /* Define to 1 if you have the <strings.h> header file. */
 #undef HAVE_STRINGS_H
 
@@ -81,8 +88,17 @@
 /* Define to 1 if you have the ANSI C header files. */
 #undef STDC_HEADERS
 
+/* Define to 1 if strerror_r returns char *. */
+#undef STRERROR_R_CHAR_P
+
 /* Version number of package */
 #undef VERSION
 
+/* Number of bits in a file offset, on hosts where this is settable. */
+#undef _FILE_OFFSET_BITS
+
+/* Define for large files, on AIX-style hosts. */
+#undef _LARGE_FILES
+
 /* Define to empty if `const' does not conform to ANSI C. */
 #undef const