Posted to common-commits@hadoop.apache.org by su...@apache.org on 2012/10/18 02:43:57 UTC

svn commit: r1399496 - in /hadoop/common/branches/branch-1-win: ./ bin/ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/io/ src/core/org/apache/hadoop/io/nativeio/ src/core/org/apache/hadoop/util/ src/hdfs/org/apache/hadoop/hdfs/server/datano...

Author: suresh
Date: Thu Oct 18 00:43:56 2012
New Revision: 1399496

URL: http://svn.apache.org/viewvc?rev=1399496&view=rev
Log:
HADOOP-8564. Reverting r1399493 due to build failure
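
This commit is a wholesale revert of r1399493, the HADOOP-8564 port of the Hadoop native libraries to Windows, backed out after it broke the build. A revert of this shape is typically produced by reverse-merging the offending revision in a working copy of the branch and committing the result, roughly (the command is an assumption about the workflow, not taken from the commit itself):

    svn merge -c -1399493 .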

Modified:
    hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt
    hadoop/common/branches/branch-1-win/bin/hadoop-config.cmd
    hadoop/common/branches/branch-1-win/build.xml
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SecureIOUtils.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIO.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java
    hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/PlatformName.java
    hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
    hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
    hadoop/common/branches/branch-1-win/src/native/src/org_apache_hadoop.h
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
    hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java

Modified: hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt (original)
+++ hadoop/common/branches/branch-1-win/CHANGES.branch-1-win.txt Thu Oct 18 00:43:56 2012
@@ -172,7 +172,3 @@ Branch-hadoop-1-win - unreleased
 
     HDFS-4065. TestDFSShell.testGet sporadically fails attempting to corrupt 
     block files due to race condition. (Chris Nauroth via suresh)
-
-    HADOOP-8564. Port and extend Hadoop native libraries for Windows to 
-    address datanode concurrent reading and writing issue.
-    (Chuan Liu via suresh)

Modified: hadoop/common/branches/branch-1-win/bin/hadoop-config.cmd
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/bin/hadoop-config.cmd?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/bin/hadoop-config.cmd (original)
+++ hadoop/common/branches/branch-1-win/bin/hadoop-config.cmd Thu Oct 18 00:43:56 2012
@@ -182,29 +182,6 @@ if not defined HADOOP_POLICYFILE (
   set HADOOP_POLICYFILE=hadoop-policy.xml
 )
 
-@rem
-@rem Determine the JAVA_PLATFORM
-@rem
-
-for /f "delims=" %%A in ('%JAVA% -Xmx32m %HADOOP_JAVA_PLATFORM_OPTS% -classpath "%CLASSPATH%" org.apache.hadoop.util.PlatformName') do set JAVA_PLATFORM=%%A
-@rem replace space with underscore
-set JAVA_PLATFORM=%JAVA_PLATFORM: =_%
-
-@rem
-@rem setup 'java.library.path' for native hadoop code if necessary
-@rem
-
-@rem Check if we're running hadoop directly from the build
-set JAVA_LIBRARY_PATH=
-if exist %HADOOP_CORE_HOME%\build\native (
-  set JAVA_LIBRARY_PATH=%HADOOP_CORE_HOME%\build\native\%JAVA_PLATFORM%\lib
-)
-
-@rem For the disto case, check the lib\native folder
-if exist %HADOOP_CORE_HOME%\lib\native (
-  set JAVA_LIBRARY_PATH=%JAVA_LIBRARY_PATH%;%HADOOP_CORE_HOME%\lib\native\%JAVA_PLATFORM%
-)
-
 set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.dir=%HADOOP_LOG_DIR%
 set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.log.file=%HADOOP_LOGFILE%
 set HADOOP_OPTS=%HADOOP_OPTS% -Dhadoop.home.dir=%HADOOP_CORE_HOME%
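
The block removed above computed JAVA_PLATFORM by running org.apache.hadoop.util.PlatformName and used it to build JAVA_LIBRARY_PATH, so after this revert the Windows launcher no longer points the JVM at a native-library directory at all. The setting matters because the Java side finds libhadoop.so (or hadoop.dll) through java.library.path; a minimal sketch of that consumer, an assumed simplification of org.apache.hadoop.util.NativeCodeLoader rather than its actual code:

    private static boolean nativeCodeLoaded = false;
    static {
      try {
        // Resolved against -Djava.library.path supplied by the launcher.
        System.loadLibrary("hadoop");
        nativeCodeLoaded = true;
      } catch (UnsatisfiedLinkError e) {
        nativeCodeLoaded = false;   // fall back to pure-Java code paths
      }
    }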

Modified: hadoop/common/branches/branch-1-win/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/build.xml?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/build.xml (original)
+++ hadoop/common/branches/branch-1-win/build.xml Thu Oct 18 00:43:56 2012
@@ -23,8 +23,6 @@
 
   <!-- Load all the default properties, and any the user wants    -->
   <!-- to contribute (without having to type -D or edit this file -->
-  <property environment="env" />
-
   <property file="${user.home}/build.properties" />
   <property file="${basedir}/build.properties" />
  
@@ -86,19 +84,12 @@
         outputproperty="nonspace.os">
      <arg value="s/ /_/g"/>
   </exec>
-  <condition property="build.platform"
-    value="${env.OS}-${os.arch}-${sun.arch.data.model}"
-    else="${nonspace.os}-${os.arch}-${sun.arch.data.model}">
-    <os family="windows" />
-  </condition>
+  <property name="build.platform" 
+            value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
   <property name="jvm.arch" 
             value="${sun.arch.data.model}"/>
-  <property name="build.winutils" value="${build.dir}/winutils/${env.OS}-${env.Platform}"/>
+  <property name="build.winutils" value="${build.dir}/winutils/${os.arch}"/>
   <property name="build.native" value="${build.dir}/native/${build.platform}"/>
-  <!-- 'build.native.win' is the path under which Hadoop native libraries for Windows are built -->
-  <!-- We need a special path on Windows because we need a path that can be referenced from MSBuild -->
-  <!-- 'sun.arch.data.model' in build.platform make it fail to satisfy the above requirement -->
-  <property name="build.native.win" value="${build.dir}/native/${env.OS}-${env.Platform}"/>
   <property name="build.c++" value="${build.dir}/c++-build/${build.platform}"/>
   <property name="build.c++.utils" value="${build.c++}/utils"/>
   <property name="build.c++.pipes" value="${build.c++}/pipes"/>
@@ -650,42 +641,7 @@
   <target name="compile-native">
     <antcall target="compile-core-native">
       <param name="compile.native" value="true"/>
-    </antcall>
-  </target>
-
-  <target name="compile-win-core-native" depends="compile-core-classes"
-    if="windows">
-
-    <mkdir dir="${build.native}/lib"/>
-    <mkdir dir="${build.native.win}/src/org/apache/hadoop/io/compress/zlib"/>
-    <mkdir dir="${build.native.win}/src/org/apache/hadoop/io/nativeio"/>
-
-    <javah
-      classpath="${build.classes}"
-      destdir="${build.native.win}/src/org/apache/hadoop/io/compress/zlib"
-      force="yes"
-      verbose="yes"
-      >
-      <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
-      <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
-    </javah>
-
-    <javah
-      classpath="${build.classes}"
-      destdir="${build.native.win}/src/org/apache/hadoop/io/nativeio"
-      force="yes"
-      verbose="yes"
-      >
-      <class name="org.apache.hadoop.io.nativeio.NativeIO" />
-      <class name="org.apache.hadoop.io.nativeio.NativeIO$POSIX" />
-      <class name="org.apache.hadoop.io.nativeio.NativeIO$Windows" />
-    </javah>
-
-    <exec dir="${build.native.win}" executable="${msbuild.cmd}" failonerror="true">
-      <arg line="${native.src.dir}/native.sln /p:Configuration=Release;OutDir=${build.native.win}/"/>
-    </exec>
-
-    <copy file="${build.native.win}/hadoop.dll" todir="${build.native}/lib"/>
+    </antcall> 
   </target>
 
   <target name="create-native-configure">
@@ -695,7 +651,7 @@
     </exec>
   </target>
 
-  <target name="compile-core-native" if="compile.native" unless="windows">
+  <target name="compile-core-native" if="compile.native">
     <antcall target="compile-core-classes"/>
     <antcall target="create-native-configure"/>
     <mkdir dir="${build.native}/lib"/>
@@ -731,8 +687,6 @@
           verbose="yes"
           >
           <class name="org.apache.hadoop.io.nativeio.NativeIO" />
-          <class name="org.apache.hadoop.io.nativeio.NativeIO$POSIX" />
-          <class name="org.apache.hadoop.io.nativeio.NativeIO$Windows" />
         </javah>
   	<javah
   	  classpath="${build.classes}"
@@ -763,7 +717,7 @@
     <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
       <env key="OS_NAME" value="${os.name}"/>
       <env key="OS_ARCH" value="${os.arch}"/>
-      <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
+  	  <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
   	  <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
     </exec>
 
@@ -775,8 +729,7 @@
 
   <target name="compile-core"
           depends="clover,compile-core-classes,compile-mapred-classes,
-          compile-hdfs-classes,compile-core-native,compile-c++,compile-ms-winutils,
-          compile-win-core-native"
+  	compile-hdfs-classes,compile-core-native,compile-c++,compile-ms-winutils" 
   	description="Compile core only">
   </target>
 
@@ -1155,7 +1108,7 @@
                      value="@{test.krb5.conf.filename}"/>
         <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml" />
         <sysproperty key="java.library.path"
-          value="${build.native}/lib${path.separator}${lib.dir}/native/${build.platform}${path.separator}${lib.file.path}${path.separator}${snappy.lib}" />
+                     value="${build.native}/lib:${lib.dir}/native/${build.platform}:${lib.file.path}:${snappy.lib}" />
         <sysproperty key="install.c++.examples"
                      value="${install.c++.examples}" />
         <sysproperty key="testjar"

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/fs/FileUtil.java Thu Oct 18 00:43:56 2012
@@ -716,7 +716,7 @@ public class FileUtil {
                                         FsPermission permission
                                        )  throws IOException {
     if (NativeIO.isAvailable()) {
-      NativeIO.POSIX.chmod(f.getCanonicalPath(), permission.toShort());
+      NativeIO.chmod(f.getCanonicalPath(), permission.toShort());
     } else {
       execCommand(f, Shell.getSetPermissionCommand(
                   String.format("%04o", permission.toShort()), false));

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SecureIOUtils.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/SecureIOUtils.java Thu Oct 18 00:43:56 2012
@@ -22,7 +22,6 @@ import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
@@ -156,28 +155,28 @@ public class SecureIOUtils {
     if (skipSecurity) {
       return insecureCreateForWrite(f, permissions);
     } else {
-      return NativeIO.getCreateForWriteFileOutputStream(f, permissions);
+      // Use the native wrapper around open(2)
+      try {
+        FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
+          NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
+          permissions);
+        return new FileOutputStream(fd);
+      } catch (NativeIOException nioe) {
+        if (nioe.getErrno() == Errno.EEXIST) {
+          throw new AlreadyExistsException(nioe);
+        }
+        throw nioe;
+      }
     }
   }
 
   private static void checkStat(File f, String owner, String expectedOwner) throws IOException {
-    boolean success = true;
     if (expectedOwner != null &&
         !expectedOwner.equals(owner)) {
-      if (Shell.WINDOWS) {
-        UserGroupInformation ugi =
-            UserGroupInformation.createRemoteUser(expectedOwner);
-        final String adminsGroupString = "Administrators";
-        success = owner.equals(adminsGroupString)
-        && Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString);
-      } else {
-        success = false;
-      }
-    }
-    if (!success)
       throw new IOException(
-          "Owner '" + owner + "' for path " + f + " did not match " +
-          "expected owner '" + expectedOwner + "'");
+        "Owner '" + owner + "' for path " + f + " did not match " +
+        "expected owner '" + expectedOwner + "'");
+    }
   }
 
   private static void checkStatFileSystem(File f, String expectedOwner)
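
Restored here is the direct open(2)-based exclusive create: O_CREAT | O_EXCL makes the existence check and the create one atomic syscall, and the loser of a create race sees errno EEXIST, surfaced as AlreadyExistsException. A hypothetical caller sketch against the enclosing createForWrite method (the path is invented for illustration):

    File f = new File("/tmp/attempt_0_r_0/output");
    FileOutputStream out = null;
    try {
      out = SecureIOUtils.createForWrite(f, 0644);
    } catch (SecureIOUtils.AlreadyExistsException e) {
      // Another writer created the file first; retry or fail.
    }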

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIO.java Thu Oct 18 00:43:56 2012
@@ -17,19 +17,13 @@
  */
 package org.apache.hadoop.io.nativeio;
 
-import java.io.File;
 import java.io.FileDescriptor;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.RandomAccessFile;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.Shell;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -39,158 +33,20 @@ import org.apache.commons.logging.LogFac
  * more portable mechanism.
  */
 public class NativeIO {
-  public static class POSIX {
-    // Flags for open() call from bits/fcntl.h
-    public static final int O_RDONLY = 00;
-    public static final int O_WRONLY = 01;
-    public static final int O_RDWR = 02;
-    public static final int O_CREAT = 0100;
-    public static final int O_EXCL = 0200;
-    public static final int O_NOCTTY = 0400;
-    public static final int O_TRUNC = 01000;
-    public static final int O_APPEND = 02000;
-    public static final int O_NONBLOCK = 04000;
-    public static final int O_SYNC = 010000;
-    public static final int O_ASYNC = 020000;
-    public static final int O_FSYNC = O_SYNC;
-    public static final int O_NDELAY = O_NONBLOCK;
-    
-    /** Wrapper around open(2) */
-    public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
-    
-    /** Wrapper around fstat(2) */
-    //TODO: fstat is an old implementation. Doesn't use the cache. This should be 
-    //changed to use the cache.
-    public static native Stat fstat(FileDescriptor fd) throws IOException;
-
-    /** Native chmod implementation. On Linux, it is a wrapper around chmod(2) */
-    private static native void chmodImpl(String path, int mode) throws IOException;
-
-    public static void chmod(String path, int mode) throws IOException {
-      if (!Shell.WINDOWS) {
-        chmodImpl(path, mode);
-      } else {
-        try {
-          chmodImpl(path, mode);
-        } catch (NativeIOException nioe) {
-          if (nioe.getErrorCode() == 3) {
-            throw new NativeIOException("No such file or directory",
-                Errno.ENOENT);
-          } else {
-            LOG.warn(String.format("NativeIO.chmod error (%d): %s",
-                nioe.getErrorCode(), nioe.getMessage()));
-            throw new NativeIOException("Unknown error", Errno.UNKNOWN);
-          }
-        }
-      }
-    }
-
-    /** Linux only methods used for getOwner() implementation */
-    private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
-    private static native String getUserName(long uid) throws IOException;
-    
-    /**
-     * Result type of the fstat call
-     */
-    public static class Stat {
-      private String owner;
-      private int mode;
-
-      // Mode constants
-      public static final int S_IFMT = 0170000;      /* type of file */
-      public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
-      public static final int   S_IFCHR  = 0020000;  /* character special */
-      public static final int   S_IFDIR  = 0040000;  /* directory */
-      public static final int   S_IFBLK  = 0060000;  /* block special */
-      public static final int   S_IFREG  = 0100000;  /* regular */
-      public static final int   S_IFLNK  = 0120000;  /* symbolic link */
-      public static final int   S_IFSOCK = 0140000;  /* socket */
-      public static final int   S_IFWHT  = 0160000;  /* whiteout */
-      public static final int S_ISUID = 0004000;  /* set user id on execution */
-      public static final int S_ISGID = 0002000;  /* set group id on execution */
-      public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
-      public static final int S_IRUSR = 0000400;  /* read permission, owner */
-      public static final int S_IWUSR = 0000200;  /* write permission, owner */
-      public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
-
-      public Stat(String owner, int mode) {
-        this.owner = owner;
-        this.mode = mode;
-      }
-
-      public String toString() {
-        return "Stat(owner='" + owner + "'" +
-          ", mode=" + mode + ")";
-      }
-
-      public String getOwner() {
-        return owner;
-      }
-      public int getMode() {
-        return mode;
-      }
-    }
-
-    static {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          initNative();
-          nativeLoaded = true;
-        } catch (Throwable t) {
-          // This can happen if the user has an older version of libhadoop.so
-          // installed - in this case we can continue without native IO
-          // after warning
-          LOG.error("Unable to initialize NativeIO libraries", t);
-        }
-      }
-    }
-  }
-  
-  public static class Windows {
-    // Flags for CreateFile() call on Windows
-    public static final long GENERIC_READ = 0x80000000L;
-    public static final long GENERIC_WRITE = 0x40000000L;
-
-    public static final long FILE_SHARE_READ = 0x00000001L;
-    public static final long FILE_SHARE_WRITE = 0x00000002L;
-    public static final long FILE_SHARE_DELETE = 0x00000004L;
-
-    public static final long CREATE_NEW = 1;
-    public static final long CREATE_ALWAYS = 2;
-    public static final long OPEN_EXISTING = 3;
-    public static final long OPEN_ALWAYS = 4;
-    public static final long TRUNCATE_EXISTING = 5;
-    
-    public static final long FILE_BEGIN = 0;
-    public static final long FILE_CURRENT = 1;
-    public static final long FILE_END = 2;
-
-    /** Wrapper around CreateFile() on Windows */
-    public static native FileDescriptor createFile(String path,
-        long desiredAccess, long shareMode, long creationDisposition)
-        throws IOException;
-
-    /** Wrapper around SetFilePointer() on Windows */
-    public static native long setFilePointer(FileDescriptor fd,
-        long distanceToMove, long moveMethod) throws IOException;
-    
-    /** Windows only methods used for getOwner() implementation */
-    private static native String getOwner(FileDescriptor fd) throws IOException;
-
-    static {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          initNative();
-          nativeLoaded = true;
-        } catch (Throwable t) {
-          // This can happen if the user has an older version of libhadoop.so
-          // installed - in this case we can continue without native IO
-          // after warning
-          LOG.error("Unable to initialize NativeIO libraries", t);
-        }
-      }
-    }
-  }
+  // Flags for open() call from bits/fcntl.h
+  public static final int O_RDONLY   =    00;
+  public static final int O_WRONLY   =    01;
+  public static final int O_RDWR     =    02;
+  public static final int O_CREAT    =  0100;
+  public static final int O_EXCL     =  0200;
+  public static final int O_NOCTTY   =  0400;
+  public static final int O_TRUNC    = 01000;
+  public static final int O_APPEND   = 02000;
+  public static final int O_NONBLOCK = 04000;
+  public static final int O_SYNC   =  010000;
+  public static final int O_ASYNC  =  020000;
+  public static final int O_FSYNC = O_SYNC;
+  public static final int O_NDELAY = O_NONBLOCK;
 
   private static final Log LOG = LogFactory.getLog(NativeIO.class);
 
@@ -217,9 +73,19 @@ public class NativeIO {
     return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
   }
 
+  /** Wrapper around open(2) */
+  public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
+  /** Wrapper around fstat(2) */
+  //TODO: fstat is an old implementation. Doesn't use the cache. This should be 
+  //changed to use the cache.
+  public static native Stat fstat(FileDescriptor fd) throws IOException;
+
+  private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
+  private static native String getUserName(long uid) throws IOException;
   /** Initialize the JNI method ID and class ID cache */
   private static native void initNative();
-
+  /** Wrapper around chmod(2) */
+  public static native void chmod(String path, int mode) throws IOException;
   private static class CachedUid {
     final long timestamp;
     final String username;
@@ -235,142 +101,71 @@ public class NativeIO {
   
   public static String getOwner(FileDescriptor fd) throws IOException {
     ensureInitialized();
-    if (Shell.WINDOWS) {
-      String owner = Windows.getOwner(fd);
-      int i = owner.indexOf('\\');
-      if (i != -1)
-        owner = owner.substring(i + 1);
-      return owner;
-    } else {
-      long uid = POSIX.getUIDforFDOwnerforOwner(fd);
-      CachedUid cUid = uidCache.get(uid);
-      long now = System.currentTimeMillis();
-      if (cUid != null && (cUid.timestamp + cacheTimeout) > now) {
-        return cUid.username;
-      }
-      String user = POSIX.getUserName(uid);
-      LOG.info("Got UserName " + user + " for UID " + uid
-          + " from the native implementation");
-      cUid = new CachedUid(user, now);
-      uidCache.put(uid, cUid);
-      return user;
-    }
+    long uid = getUIDforFDOwnerforOwner(fd);
+    CachedUid cUid = uidCache.get(uid);
+    long now = System.currentTimeMillis();
+    if (cUid != null && (cUid.timestamp + cacheTimeout) > now) {
+      return cUid.username;
+    }
+    String user = getUserName(uid);
+    LOG.info("Got UserName " + user + " for UID " + uid + 
+        " from the native implementation");
+    cUid = new CachedUid(user, now);
+    uidCache.put(uid, cUid);
+    return user;
   }
-
-  /**
-   * Create a FileInputStream that shares delete permission on the
-   * file opened, i.e. other process can delete the file the
-   * FileInputStream is reading. Only Windows implementation uses
-   * the native interface.
-   */
-  public static FileInputStream getShareDeleteFileInputStream(File f)
-      throws IOException {
-    if (!Shell.WINDOWS) {
-      // On Linux the default FileInputStream shares delete permission
-      // on the file opened.
-      //
-      return new FileInputStream(f);
-    } else {
-      // Use Windows native interface to create a FileInputStream that
-      // shares delete permission on the file opened.
-      //
-      FileDescriptor fd = Windows.createFile(
-          f.getAbsolutePath(),
-          Windows.GENERIC_READ,
-          Windows.FILE_SHARE_READ |
-          Windows.FILE_SHARE_WRITE |
-          Windows.FILE_SHARE_DELETE,
-          Windows.OPEN_EXISTING);
-      return new FileInputStream(fd);
+    
+  private synchronized static void ensureInitialized() {
+    if (!initialized) {
+      cacheTimeout = 
+        new Configuration().getLong("hadoop.security.uid.cache.secs", 
+                                     4*60*60) * 1000;
+      LOG.info("Initialized cache for UID to User mapping with a cache" +
+      		" timeout of " + cacheTimeout/1000 + " seconds.");
+      initialized = true;
     }
   }
 
-  /**
-   * Create a FileInputStream that shares delete permission on the
-   * file opened at a given offset, i.e. other process can delete
-   * the file the FileInputStream is reading. Only Windows implementation
-   * uses the native interface.
-   */
-  public static FileInputStream getShareDeleteFileInputStream(File f, long seekOffset)
-      throws IOException {
-    if (!Shell.WINDOWS) {
-      RandomAccessFile rf = new RandomAccessFile(f, "r");
-      if (seekOffset > 0) {
-        rf.seek(seekOffset);
-      }
-      return new FileInputStream(rf.getFD());
-    } else {
-      // Use Windows native interface to create a FileInputStream that
-      // shares delete permission on the file opened, and set it to the
-      // given offset.
-      //
-      FileDescriptor fd = NativeIO.Windows.createFile(
-          f.getAbsolutePath(),
-          NativeIO.Windows.GENERIC_READ,
-          NativeIO.Windows.FILE_SHARE_READ |
-          NativeIO.Windows.FILE_SHARE_WRITE |
-          NativeIO.Windows.FILE_SHARE_DELETE,
-          NativeIO.Windows.OPEN_EXISTING);
-      if (seekOffset > 0)
-        NativeIO.Windows.setFilePointer(fd, seekOffset, NativeIO.Windows.FILE_BEGIN);
-      return new FileInputStream(fd);
-    }
-  }
 
   /**
-   * Create the specified File for write access, ensuring that it does not exist.
-   * @param f the file that we want to create
-   * @param permissions we want to have on the file (if security is enabled)
-   *
-   * @throws AlreadyExistsException if the file already exists
-   * @throws IOException if any other error occurred
+   * Result type of the fstat call
    */
-  public static FileOutputStream getCreateForWriteFileOutputStream(File f, int permissions)
-  throws IOException {
-    if (!Shell.WINDOWS) {
-      // Use the native wrapper around open(2)
-      try {
-        FileDescriptor fd = NativeIO.POSIX.open(f.getAbsolutePath(),
-            NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT
-                | NativeIO.POSIX.O_EXCL, permissions);
-        return new FileOutputStream(fd);
-      } catch (NativeIOException nioe) {
-        if (nioe.getErrno() == Errno.EEXIST) {
-          throw new AlreadyExistsException(nioe);
-        }
-        throw nioe;
-      }
-    } else {
-      // Use the Windows native APIs to create equivalent FileOutputStream
-      try {
-        FileDescriptor fd = NativeIO.Windows.createFile(f.getCanonicalPath(),
-            NativeIO.Windows.GENERIC_WRITE,
-            NativeIO.Windows.FILE_SHARE_DELETE
-                | NativeIO.Windows.FILE_SHARE_READ
-                | NativeIO.Windows.FILE_SHARE_WRITE,
-            NativeIO.Windows.CREATE_NEW);
-        NativeIO.POSIX.chmod(f.getCanonicalPath(), permissions);
-        return new FileOutputStream(fd);
-      } catch (NativeIOException nioe) {
-        if (nioe.getErrorCode() == 80) {
-          // ERROR_FILE_EXISTS
-          // 80 (0x50)
-          // The file exists
-          throw new AlreadyExistsException(nioe);
-        }
-        throw nioe;
-      }
+  public static class Stat {
+    private String owner;
+    private int mode;
+
+    // Mode constants
+    public static final int S_IFMT = 0170000;      /* type of file */
+    public static final int   S_IFIFO  = 0010000;  /* named pipe (fifo) */
+    public static final int   S_IFCHR  = 0020000;  /* character special */
+    public static final int   S_IFDIR  = 0040000;  /* directory */
+    public static final int   S_IFBLK  = 0060000;  /* block special */
+    public static final int   S_IFREG  = 0100000;  /* regular */
+    public static final int   S_IFLNK  = 0120000;  /* symbolic link */
+    public static final int   S_IFSOCK = 0140000;  /* socket */
+    public static final int   S_IFWHT  = 0160000;  /* whiteout */
+    public static final int S_ISUID = 0004000;  /* set user id on execution */
+    public static final int S_ISGID = 0002000;  /* set group id on execution */
+    public static final int S_ISVTX = 0001000;  /* save swapped text even after use */
+    public static final int S_IRUSR = 0000400;  /* read permission, owner */
+    public static final int S_IWUSR = 0000200;  /* write permission, owner */
+    public static final int S_IXUSR = 0000100;  /* execute/search permission, owner */
+
+    Stat(String owner, int mode) {
+      this.owner = owner;
+      this.mode = mode;
+    }
+
+    public String toString() {
+      return "Stat(owner='" + owner + "'" +
+        ", mode=" + mode + ")";
     }
-  }
 
-  private synchronized static void ensureInitialized() {
-    if (!initialized) {
-      cacheTimeout = 
-        new Configuration().getLong("hadoop.security.uid.cache.secs", 
-                                     4*60*60) * 1000;
-      LOG.info("Initialized cache for UID to User mapping with a cache" +
-        " timeout of " + cacheTimeout/1000 + " seconds.");
-      initialized = true;
+    public String getOwner() {
+      return owner;
+    }
+    public int getMode() {
+      return mode;
     }
   }
 }
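
Taken together, these hunks collapse the POSIX and Windows nested classes back into a single flat, POSIX-only NativeIO, and delete the two Windows-oriented helpers (getShareDeleteFileInputStream and getCreateForWriteFileOutputStream) that FSDataset and SecureIOUtils had called. A short sketch against the restored flat API (the path is invented for illustration):

    // Exclusive create through the flat API; the flags are the octal
    // bits/fcntl.h values redeclared at the top of the class.
    FileDescriptor fd = NativeIO.open("/data/current/blk_1",
        NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0644);
    FileOutputStream out = new FileOutputStream(fd);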

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/io/nativeio/NativeIOException.java Thu Oct 18 00:43:56 2012
@@ -19,38 +19,18 @@ package org.apache.hadoop.io.nativeio;
 
 import java.io.IOException;
 
-import org.apache.hadoop.util.Shell;
-
 /**
  * An exception generated by a call to the native IO code.
  *
- * These exceptions simply wrap <i>errno</i> result codes on Linux,
- * or the System Error Code on Windows.
+ * These exceptions simply wrap <i>errno</i> result codes.
  */
 public class NativeIOException extends IOException {
   private static final long serialVersionUID = -6615764817732323232L;
   private Errno errno;
-  
-  // Java has no unsigned primitive error code. Use a signed 32-bit
-  // integer to hold the unsigned 32-bit integer.
-  private int errorCode;
 
   public NativeIOException(String msg, Errno errno) {
     super(msg);
     this.errno = errno;
-    // Windows error code is always set to ERROR_SUCCESS on Linux,
-    // i.e. no failure on Windows
-    this.errorCode = 0;
-  }
-  
-  public NativeIOException(String msg, int errorCode) {
-    super(msg);
-    this.errorCode = errorCode;
-    this.errno = Errno.UNKNOWN;
-  }
-  
-  public long getErrorCode() {
-    return errorCode;
   }
 
   public Errno getErrno() {
@@ -58,10 +38,7 @@ public class NativeIOException extends I
   }
 
   public String toString() {
-    if (Shell.WINDOWS)
-      return errorCode + ": " + super.getMessage();
-    else
-      return errno.toString() + ": " + super.getMessage();
+    return errno.toString() + ": " + super.getMessage();
   }
 }
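
With the Windows error-code plumbing gone, NativeIOException once again carries only a POSIX Errno. Typical post-revert handling, in sketch form (the path is invented for illustration):

    try {
      NativeIO.chmod("/no/such/path", 0644);
    } catch (NativeIOException e) {
      if (e.getErrno() == Errno.ENOENT) {
        // The path does not exist.
      }
    }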
 

Modified: hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/PlatformName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/PlatformName.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/PlatformName.java (original)
+++ hadoop/common/branches/branch-1-win/src/core/org/apache/hadoop/util/PlatformName.java Thu Oct 18 00:43:56 2012
@@ -27,11 +27,10 @@ public class PlatformName {
    * The complete platform 'name' to identify the platform as 
    * per the java-vm.
    */
-  private static final String platformName =
-      (Shell.WINDOWS ? System.getenv("os") : System.getProperty("os.name"))
-      + "-" + System.getProperty("os.arch")
-      + "-" + System.getProperty("sun.arch.data.model");
-
+  private static final String platformName = System.getProperty("os.name") + "-" + 
+    System.getProperty("os.arch") + "-" +
+    System.getProperty("sun.arch.data.model");
+  
   /**
    * Get the complete platform as per the java-vm.
    * @return returns the complete platform as per the java-vm.
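
PlatformName reverts to deriving the platform string purely from JVM system properties, with no Windows special case reading the os environment variable. The resulting value is what the launcher scripts and the build use to name native-library directories; the restored expression amounts to:

    // e.g. "Linux-amd64-64" on a 64-bit Linux JVM
    String platform = System.getProperty("os.name") + "-"
        + System.getProperty("os.arch") + "-"
        + System.getProperty("sun.arch.data.model");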

Modified: hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java (original)
+++ hadoop/common/branches/branch-1-win/src/hdfs/org/apache/hadoop/hdfs/server/datanode/FSDataset.java Thu Oct 18 00:43:56 2012
@@ -50,12 +50,10 @@ import org.apache.hadoop.metrics2.util.M
 import org.apache.hadoop.hdfs.server.datanode.metrics.FSDatasetMBean;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryInfo;
 import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
-import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DiskChecker;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
-import org.apache.hadoop.util.Shell;
 import org.mortbay.log.Log;
 
 /**************************************************
@@ -64,15 +62,8 @@ import org.mortbay.log.Log;
  *
  ***************************************************/
 public class FSDataset implements FSConstants, FSDatasetInterface {
-  private final static boolean isNativeIOAvailable;
-  static {
-    isNativeIOAvailable = NativeIO.isAvailable();
-    if (Shell.WINDOWS && !isNativeIOAvailable) {
-      Log.warn("Data node cannot fully support concurrent reading"
-          + " and writing without native code extensions on Windows.");
-    }
-  }
   
+
   /** Find the metadata file for the specified block file.
    * Return the generation stamp from the name of the metafile.
    */
@@ -942,14 +933,8 @@ public class FSDataset implements FSCons
   public MetaDataInputStream getMetaDataInputStream(Block b)
       throws IOException {
     File checksumFile = getMetaFile( b );
-    if (isNativeIOAvailable) {
-      return new MetaDataInputStream(
-          NativeIO.getShareDeleteFileInputStream(checksumFile),
-          checksumFile.length());
-    } else {
-      return new MetaDataInputStream(new FileInputStream(checksumFile),
-          checksumFile.length());
-    }
+    return new MetaDataInputStream(new FileInputStream(checksumFile),
+                                                    checksumFile.length());
   }
 
   FSVolumeSet volumes;
@@ -1099,24 +1084,17 @@ public class FSDataset implements FSCons
   }
   
   public synchronized InputStream getBlockInputStream(Block b) throws IOException {
-    if (isNativeIOAvailable) {
-      return NativeIO.getShareDeleteFileInputStream(getBlockFile(b));
-    } else {
-      return new FileInputStream(getBlockFile(b));
-    }
+    return new FileInputStream(getBlockFile(b));
   }
 
   public synchronized InputStream getBlockInputStream(Block b, long seekOffset) throws IOException {
+
     File blockFile = getBlockFile(b);
-    if (isNativeIOAvailable) {
-      return NativeIO.getShareDeleteFileInputStream(blockFile, seekOffset);
-    } else {
-      RandomAccessFile blockInFile = new RandomAccessFile(blockFile, "r");
-      if (seekOffset > 0) {
-        blockInFile.seek(seekOffset);
-      }
-      return new FileInputStream(blockInFile.getFD());
+    RandomAccessFile blockInFile = new RandomAccessFile(blockFile, "r");
+    if (seekOffset > 0) {
+      blockInFile.seek(seekOffset);
     }
+    return new FileInputStream(blockInFile.getFD());
   }
 
   /**
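
These hunks restore the plain-Java read paths for block and metadata files. The cost, and the reason HADOOP-8564 existed, is visible in the restored code: a stream opened this way on Windows does not grant FILE_SHARE_DELETE, so a long-lived reader can block concurrent deletion or renaming of the same block file. The restored offset-read path, in sketch form:

    // Open the block file and position it; without the share-delete
    // native helper, the descriptor holds the file exclusively on Windows.
    RandomAccessFile blockInFile = new RandomAccessFile(blockFile, "r");
    if (seekOffset > 0) {
      blockInFile.seek(seekOffset);
    }
    InputStream in = new FileInputStream(blockInFile.getFD());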

Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Thu Oct 18 00:43:56 2012
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-#ifdef UNIX
 #if defined HAVE_CONFIG_H
   #include <config.h>
 #endif
@@ -44,7 +43,6 @@
 #else
   #error 'dlfcn.h not found'
 #endif  
-#endif
 
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
@@ -59,65 +57,30 @@ static jfieldID ZlibCompressor_directBuf
 static jfieldID ZlibCompressor_finish;
 static jfieldID ZlibCompressor_finished;
 
-#ifdef UNIX
 static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
 static int (*dlsym_deflate)(z_streamp, int);
 static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
 static int (*dlsym_deflateReset)(z_streamp);
 static int (*dlsym_deflateEnd)(z_streamp);
-#endif
-
-#ifdef WINDOWS
-typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
-typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
-typedef int (__cdecl *__dlsym_deflateSetDictionary) (z_streamp, const Bytef *, uInt);
-typedef int (__cdecl *__dlsym_deflateReset) (z_streamp);
-typedef int (__cdecl *__dlsym_deflateEnd) (z_streamp);
-static __dlsym_deflateInit2_ dlsym_deflateInit2_;
-static __dlsym_deflate dlsym_deflate;
-static __dlsym_deflateSetDictionary dlsym_deflateSetDictionary;
-static __dlsym_deflateReset dlsym_deflateReset;
-static __dlsym_deflateEnd dlsym_deflateEnd;
-#endif
 
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
 	JNIEnv *env, jclass class
 	) {
-#ifdef UNIX
 	// Load libz.so
 	void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
-  if (!libz) {
+	if (!libz) {
 		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
 	  	return;
 	}
-#endif
-
-#ifdef WINDOWS
-    HMODULE libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
-  if (!libz) {
-		THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
-	  	return;
-	}
-#endif
 
 	// Locate the requisite symbols from libz.so
-#ifdef UNIX
 	dlerror();                                 // Clear any existing error
-  LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
-  LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
-  LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
-  LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
-  LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
-#endif
-
-#ifdef WINDOWS
-  LOAD_DYNAMIC_SYMBOL(__dlsym_deflateInit2_, dlsym_deflateInit2_, env, libz, "deflateInit2_");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_deflate, dlsym_deflate, env, libz, "deflate");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateSetDictionary, dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateReset, dlsym_deflateReset, env, libz, "deflateReset");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_deflateEnd, dlsym_deflateEnd, env, libz, "deflateEnd");
-#endif
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+	LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
 
 	// Initialize the requisite fieldIds
     ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz", 
@@ -143,8 +106,6 @@ JNIEXPORT jlong JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
 	JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
 	) {
-    int rv = 0;
-	  static const int memLevel = 8; 							// See zconf.h
 	// Create a z_stream
     z_stream *stream = malloc(sizeof(z_stream));
     if (!stream) {
@@ -154,7 +115,8 @@ Java_org_apache_hadoop_io_compress_zlib_
     memset((void*)stream, 0, sizeof(z_stream));
 
 	// Initialize stream
-    rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
+	static const int memLevel = 8; 							// See zconf.h
+    int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
     			memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
     			
     if (rv != Z_OK) {
@@ -189,12 +151,11 @@ Java_org_apache_hadoop_io_compress_ZlibC
 	JNIEnv *env, jclass class, jlong stream, 
 	jarray b, jint off, jint len
 	) {
-    int rv = 0;
     Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
     if (!buf) {
         return;
     }
-    rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
     (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
     
     if (rv != Z_OK) {
@@ -218,17 +179,6 @@ JNIEXPORT jint JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
 	JNIEnv *env, jobject this
 	) {
-    jobject clazz = NULL;
-    jobject uncompressed_direct_buf = NULL;
-    jint uncompressed_direct_buf_off = 0;
-    jint uncompressed_direct_buf_len = 0;
-    jobject compressed_direct_buf = NULL;
-    jint compressed_direct_buf_len = 0;
-    jboolean finish;
-    Bytef* uncompressed_bytes = NULL;
-    Bytef* compressed_bytes = NULL;
-    int rv = 0;
-    jint no_compressed_bytes = 0;
 	// Get members of ZlibCompressor
     z_stream *stream = ZSTREAM(
     						(*env)->GetLongField(env, this, 
@@ -240,25 +190,25 @@ Java_org_apache_hadoop_io_compress_zlib_
     } 
 
     // Get members of ZlibCompressor
-    clazz = (*env)->GetStaticObjectField(env, this, 
+    jobject clazz = (*env)->GetStaticObjectField(env, this, 
                                                  ZlibCompressor_clazz);
-	uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
+	jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this, 
 									ZlibCompressor_uncompressedDirectBuf);
-	uncompressed_direct_buf_off = (*env)->GetIntField(env, this, 
+	jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this, 
 									ZlibCompressor_uncompressedDirectBufOff);
-	uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
 									ZlibCompressor_uncompressedDirectBufLen);
 
-	compressed_direct_buf = (*env)->GetObjectField(env, this, 
+	jobject compressed_direct_buf = (*env)->GetObjectField(env, this, 
 									ZlibCompressor_compressedDirectBuf);
-	compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
 									ZlibCompressor_directBufferSize);
 
-	finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
+	jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
 
     // Get the input direct buffer
     LOCK_CLASS(env, clazz, "ZlibCompressor");
-	uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
 											uncompressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibCompressor");
     
@@ -268,7 +218,7 @@ Java_org_apache_hadoop_io_compress_zlib_
 	
     // Get the output direct buffer
     LOCK_CLASS(env, clazz, "ZlibCompressor");
-	compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env, 
 										compressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibCompressor");
 
@@ -283,8 +233,9 @@ Java_org_apache_hadoop_io_compress_zlib_
 	stream->avail_out = compressed_direct_buf_len;
 	
 	// Compress
-	rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
+	int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
 
+	jint no_compressed_bytes = 0;
 	switch (rv) {
     	// Contingency? - Report error by throwing appropriate exceptions
   		case Z_STREAM_END:
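
Two reverted patterns recur throughout this file (and its decompressor twin below). First, the UNIX/WINDOWS #ifdef split disappears: the Windows branch had loaded zlib1.dll with LoadLibrary and bound the deflate* entry points through __cdecl function-pointer typedefs, while the UNIX branch keeps the plain dlopen/dlsym pair. Second, the port had hoisted local declarations such as rv and the buffer pointers to the top of each function, because MSVC compiles .c sources as C89, which forbids declarations after statements; the revert moves each declaration back to its point of first use, as C99 allows.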

Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Thu Oct 18 00:43:56 2012
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-#ifdef UNIX
 #if defined HAVE_CONFIG_H
   #include <config.h>
 #endif
@@ -44,7 +43,6 @@
 #else
   #error 'dlfcn.h not found'
 #endif  
-#endif
 
 #include "org_apache_hadoop_io_compress_zlib.h"
 #include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
@@ -59,69 +57,32 @@ static jfieldID ZlibDecompressor_directB
 static jfieldID ZlibDecompressor_needDict;
 static jfieldID ZlibDecompressor_finished;
 
-#ifdef UNIX
 static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
 static int (*dlsym_inflate)(z_streamp, int);
 static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
 static int (*dlsym_inflateReset)(z_streamp);
 static int (*dlsym_inflateEnd)(z_streamp);
-#endif
-
-#ifdef WINDOWS
-typedef int (__cdecl *__dlsym_inflateInit2_)(z_streamp, int, const char *, int);
-typedef int (__cdecl *__dlsym_inflate)(z_streamp, int);
-typedef int (__cdecl *__dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
-typedef int (__cdecl *__dlsym_inflateReset)(z_streamp);
-typedef int (__cdecl *__dlsym_inflateEnd)(z_streamp);
-static __dlsym_inflateInit2_ dlsym_inflateInit2_;
-static __dlsym_inflate dlsym_inflate;
-static __dlsym_inflateSetDictionary dlsym_inflateSetDictionary;
-static __dlsym_inflateReset dlsym_inflateReset;
-static __dlsym_inflateEnd dlsym_inflateEnd;
-#endif
 
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
-JNIEnv *env, jclass class
+	JNIEnv *env, jclass class
 	) {
 	// Load libz.so
-#ifdef UNIX
-  void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+    void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
 	if (!libz) {
 	  THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
 	  return;
 	} 
-#endif
-
-#ifdef WINDOWS
-  HMODULE libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
-	if (!libz) {
-	  THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
-	  return;
-	} 
-#endif
-
 
 	// Locate the requisite symbols from libz.so
-#ifdef UNIX
 	dlerror();                                 // Clear any existing error
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
 	LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
-#endif
-
-#ifdef WINDOWS
-	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateInit2_, dlsym_inflateInit2_, env, libz, "inflateInit2_");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_inflate, dlsym_inflate, env, libz, "inflate");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateSetDictionary, dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateReset, dlsym_inflateReset, env, libz, "inflateReset");
-	LOAD_DYNAMIC_SYMBOL(__dlsym_inflateEnd, dlsym_inflateEnd, env, libz, "inflateEnd");
-#endif
-
 
-  // Initialize the requisite fieldIds
+	// Initialize the requisite fieldIds
     ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz", 
                                                       "Ljava/lang/Class;");
     ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
@@ -145,7 +106,6 @@ JNIEXPORT jlong JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
 	JNIEnv *env, jclass cls, jint windowBits
 	) {
-    int rv = 0;
     z_stream *stream = malloc(sizeof(z_stream));
     memset((void*)stream, 0, sizeof(z_stream));
 
@@ -154,7 +114,7 @@ Java_org_apache_hadoop_io_compress_zlib_
 		return (jlong)0;
     } 
     
-    rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
+    int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
 
 	if (rv != Z_OK) {
 	    // Contingency - Report error by throwing appropriate exceptions
@@ -183,13 +143,12 @@ Java_org_apache_hadoop_io_compress_zlib_
 	JNIEnv *env, jclass cls, jlong stream,
 	jarray b, jint off, jint len
 	) {
-    int rv = 0;
     Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
     if (!buf) {
 		THROW(env, "java/lang/InternalError", NULL);
         return;
     }
-    rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
+    int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
     (*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
     
     if (rv != Z_OK) {
@@ -215,16 +174,6 @@ JNIEXPORT jint JNICALL
 Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
 	JNIEnv *env, jobject this
 	) {
-    jobject clazz = NULL;
-    jarray compressed_direct_buf = NULL;
-    jint compressed_direct_buf_off = 0;
-    jint compressed_direct_buf_len = 0;
-    jarray uncompressed_direct_buf = NULL;
-    jint uncompressed_direct_buf_len = 0;
-    Bytef *compressed_bytes = NULL;
-    Bytef *uncompressed_bytes = NULL;
-    int rv = 0;
-    int no_decompressed_bytes = 0;
 	// Get members of ZlibDecompressor
     z_stream *stream = ZSTREAM(
     						(*env)->GetLongField(env, this, 
@@ -236,23 +185,23 @@ Java_org_apache_hadoop_io_compress_zlib_
     } 
 
     // Get members of ZlibDecompressor
-    clazz = (*env)->GetStaticObjectField(env, this, 
+    jobject clazz = (*env)->GetStaticObjectField(env, this, 
                                                  ZlibDecompressor_clazz);
-	compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+	jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
 											ZlibDecompressor_compressedDirectBuf);
-	compressed_direct_buf_off = (*env)->GetIntField(env, this, 
+	jint compressed_direct_buf_off = (*env)->GetIntField(env, this, 
 									ZlibDecompressor_compressedDirectBufOff);
-	compressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	jint compressed_direct_buf_len = (*env)->GetIntField(env, this, 
 									ZlibDecompressor_compressedDirectBufLen);
 
-	uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
+	jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this, 
 											ZlibDecompressor_uncompressedDirectBuf);
-	uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
+	jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this, 
 										ZlibDecompressor_directBufferSize);
 
     // Get the input direct buffer
     LOCK_CLASS(env, clazz, "ZlibDecompressor");
-	compressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env, 
 										compressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
     
@@ -262,7 +211,7 @@ Java_org_apache_hadoop_io_compress_zlib_
 	
     // Get the output direct buffer
     LOCK_CLASS(env, clazz, "ZlibDecompressor");
-	uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
+	Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env, 
 											uncompressed_direct_buf);
     UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
 
@@ -277,9 +226,10 @@ Java_org_apache_hadoop_io_compress_zlib_
 	stream->avail_out = uncompressed_direct_buf_len;
 	
 	// Decompress
-	rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
+	int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
 
 	// Contingency? - Report error by throwing appropriate exceptions
+	int no_decompressed_bytes = 0;	
 	switch (rv) {
 		case Z_STREAM_END:
 		{

Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Thu Oct 18 00:43:56 2012
@@ -19,10 +19,6 @@
 #if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 #define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
 
-#include "org_apache_hadoop.h"
-
-#ifdef UNIX
-
 #if defined HAVE_CONFIG_H
   #include <config.h>
 #endif
@@ -56,13 +52,8 @@
 #else
   #error 'jni.h not found'
 #endif
-#endif
 
-#ifdef WINDOWS
-#define HADOOP_ZLIB_LIBRARY L"zlib1.dll"
-#include <zlib.h>
-#include <zconf.h>
-#endif
+#include "org_apache_hadoop.h"
 
 /* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
 #define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
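
Beyond dropping the Windows branch (which pointed HADOOP_ZLIB_LIBRARY at the wide-string name L"zlib1.dll"), this hunk moves the org_apache_hadoop.h include, the source of the THROW and LOAD_DYNAMIC_SYMBOL helpers used in the .c files above, back below the platform includes. The ZSTREAM macro in the trailing context is the handle convention for the whole zlib binding: the native z_stream pointer is stored in a jlong field on the Java side and cast back through ptrdiff_t, keeping the conversion width-safe on both 32-bit and 64-bit JVMs.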

Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Thu Oct 18 00:43:56 2012
@@ -17,32 +17,24 @@
  */
 
 // get the autoconf settings
+#include "config.h"
+
 #include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <grp.h>
 #include <jni.h>
+#include <pwd.h>
 #include <stdlib.h>
 #include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
 
 #include "org_apache_hadoop.h"
 #include "org_apache_hadoop_io_nativeio_NativeIO.h"
 #include "file_descriptor.h"
-
-#ifdef UNIX
-#include "config.h"
-#include <grp.h>
-#include <pwd.h>
-#include <unistd.h>
-#include <errno.h>
-#include <fcntl.h>
-#include <sys/stat.h>
-#include <sys/types.h>
 #include "errno_enum.h"
-#endif
-
-#ifdef WINDOWS
-#include <Windows.h>
-#include "winutils.h"
-#endif
-
 
 // the NativeIO$Stat inner class and its constructor
 static jclass stat_clazz;
@@ -54,15 +46,12 @@ static jmethodID nioe_ctor;
 
 // Internal functions
 static void throw_ioe(JNIEnv* env, int errnum);
-#ifdef UNIX
 static ssize_t get_pw_buflen();
-#endif
 
-#ifdef UNIX
+
 static void stat_init(JNIEnv *env) {
   // Init Stat
-  jclass clazz = (*env)->FindClass(env,
-    "org/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat");
+  jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
   PASS_EXCEPTIONS(env);
   stat_clazz = (*env)->NewGlobalRef(env, clazz);
   stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
@@ -75,7 +64,6 @@ static void stat_deinit(JNIEnv *env) {
     stat_clazz = NULL;
   }
 }
-#endif
 
 static void nioe_init(JNIEnv *env) {
   // Init NativeIOException
@@ -84,16 +72,8 @@ static void nioe_init(JNIEnv *env) {
   PASS_EXCEPTIONS(env);
 
   nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
-
-#ifdef UNIX
   nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
     "(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
-#endif
-
-#ifdef WINDOWS
-  nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
-    "(Ljava/lang/String;I)V");
-#endif
 }
 
 static void nioe_deinit(JNIEnv *env) {
@@ -114,37 +94,32 @@ static void nioe_deinit(JNIEnv *env) {
 JNIEXPORT void JNICALL
 Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
 	JNIEnv *env, jclass clazz) {
+
+  stat_init(env);
+  PASS_EXCEPTIONS_GOTO(env, error);
   nioe_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
   fd_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
-#ifdef UNIX
-  stat_init(env);
-  PASS_EXCEPTIONS_GOTO(env, error);
   errno_enum_init(env);
   PASS_EXCEPTIONS_GOTO(env, error);
-#endif
   return;
 error:
   // these are all idempotent and safe to call even if the
   // class wasn't initialized yet
+  stat_deinit(env);
   nioe_deinit(env);
   fd_deinit(env);
-#ifdef UNIX
-  stat_deinit(env);
   errno_enum_deinit(env);
-#endif
 }
 
 /*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
- * Method:    fstat
- * Signature: (Ljava/io/FileDescriptor;)Lorg/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat;
+ * public static native Stat fstat(FileDescriptor fd);
  */
-JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat
-  (JNIEnv *env, jclass clazz, jobject fd_object)
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+  JNIEnv *env, jclass clazz, jobject fd_object)
 {
-#ifdef UNIX
   jobject ret = NULL;
   char *pw_buf = NULL;
 
@@ -193,24 +168,17 @@ JNIEXPORT jobject JNICALL Java_org_apach
 cleanup:
   if (pw_buf != NULL) free(pw_buf);
   return ret;
-#endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.fstat() is not supported on Windows");
-  return NULL;
-#endif
 }
 
+
 /*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
- * Method:    open
- * Signature: (Ljava/lang/String;II)Ljava/io/FileDescriptor;
+ * public static native FileDescriptor open(String path, int flags, int mode);
  */
-JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_open
-  (JNIEnv *env, jclass clazz, jstring j_path, jint flags, jint mode)
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+  JNIEnv *env, jclass clazz, jstring j_path,
+  jint flags, jint mode)
 {
-#ifdef UNIX
   jobject ret = NULL;
 
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
@@ -235,87 +203,14 @@ cleanup:
     (*env)->ReleaseStringUTFChars(env, j_path, path);
   }
   return ret;
-#endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.open() is not supported on Windows");
-  return NULL;
-#endif
-}
-
-/*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
- * Method:    createFile
- * Signature: (Ljava/lang/String;JJJ)Ljava/io/FileDescriptor;
- */
-JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFile
-  (JNIEnv *env, jclass clazz, jstring j_path,
-  jlong desiredAccess, jlong shareMode, jlong creationDisposition)
-{
-#ifdef UNIX
-  THROW(env, "java/io/IOException",
-    "The function Windows.createFile() is not supported on Unix");
-  return NULL;
-#endif
-
-#ifdef WINDOWS
-  DWORD dwRtnCode = ERROR_SUCCESS;
-  BOOL isSymlink = FALSE;
-  BOOL isJunction = FALSE;
-  DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS;
-  jobject ret = (jobject) NULL;
-  HANDLE hFile = INVALID_HANDLE_VALUE;
-  WCHAR *path = (WCHAR *) (*env)->GetStringChars(env, j_path, (jboolean*)NULL);
-  if (path == NULL) goto cleanup;
-
-  // Set the flag for a symbolic link or a junction point only when it exists.
-  // According to MSDN, if the call to CreateFile() creates a new file,
-  // there is no change in behavior. So we do not throw if no file is found.
-  //
-  dwRtnCode = SymbolicLinkCheck(path, &isSymlink);
-  if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
-    throw_ioe(env, dwRtnCode);
-    goto cleanup;
-  }
-  dwRtnCode = JunctionPointCheck(path, &isJunction);
-  if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
-    throw_ioe(env, dwRtnCode);
-    goto cleanup;
-  }
-  if (isSymlink || isJunction)
-    dwFlagsAndAttributes |= FILE_FLAG_OPEN_REPARSE_POINT;
-
-  hFile = CreateFile(path,
-    (DWORD) desiredAccess,
-    (DWORD) shareMode,
-    (LPSECURITY_ATTRIBUTES ) NULL,
-    (DWORD) creationDisposition,
-    dwFlagsAndAttributes,
-    NULL);
-  if (hFile == INVALID_HANDLE_VALUE) {
-    throw_ioe(env, GetLastError());
-    goto cleanup;
-  }
-
-  ret = fd_create(env, (long) hFile);
-cleanup:
-  if (path != NULL) {
-    (*env)->ReleaseStringChars(env, j_path, (const jchar*)path);
-  }
-  return (jobject) ret;
-#endif
 }
 
 /*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
- * Method:    getUIDforFDOwnerforOwner
- * Signature: (Ljava/io/FileDescriptor;)J
+ * private static native long getUIDforFDOwnerforOwner(FileDescriptor fd);
  */
-JNIEXPORT jlong JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUIDforFDOwnerforOwner
-  (JNIEnv *env, jclass clazz, jobject fd_object)
-{
-#ifdef UNIX
+JNIEXPORT jlong JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getUIDforFDOwnerforOwner(JNIEnv *env, jclass clazz,
+ jobject fd_object) {
   int fd = fd_get(env, fd_object);
   PASS_EXCEPTIONS_GOTO(env, cleanup);
 
@@ -328,24 +223,15 @@ JNIEXPORT jlong JNICALL Java_org_apache_
   return (jlong)(s.st_uid);
 cleanup:
   return (jlong)(-1);
-#endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.getUIDforFDOwnerforOwner() is not supported on Windows");
-  return (jlong)(-1);
-#endif
 }
 
 /*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
- * Method:    getUserName
- * Signature: (J)Ljava/lang/String;
+ * private static native String getUserName(long uid);
  */
-JNIEXPORT jstring JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUserName
-  (JNIEnv *env, jclass clazz, jlong uid)
-{
-#ifdef UNIX   
+JNIEXPORT jstring JNICALL 
+Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env, 
+jclass clazz, jlong uid) {
+   
   char *pw_buf = NULL;
   int rc;
   size_t pw_buflen = get_pw_buflen();
@@ -378,24 +264,16 @@ JNIEXPORT jstring JNICALL Java_org_apach
 cleanup:
   if (pw_buf != NULL) free(pw_buf);
   return jstr_username;
-#endif
-
-#ifdef WINDOWS
-  THROW(env, "java/io/IOException",
-    "The function POSIX.getUserName() is not supported on Windows");
-  return NULL;
-#endif
 }
 
-/*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_POSIX
- * Method:    chmod
- * Signature: (Ljava/lang/String;I)V
+/**
+ * public static native void chmod(String path, int mode) throws IOException;
  */
-JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_chmodImpl
-  (JNIEnv *env, jclass clazz, jstring j_path, jint mode)
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
+  JNIEnv *env, jclass clazz, jstring j_path,
+  jint mode)
 {
-#ifdef UNIX
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
   if (path == NULL) return; // JVM throws Exception for us
 
@@ -404,20 +282,6 @@ JNIEXPORT void JNICALL Java_org_apache_h
   }
 
   (*env)->ReleaseStringUTFChars(env, j_path, path);
-#endif
-
-#ifdef WINDOWS
-  DWORD dwRtnCode = ERROR_SUCCESS;
-  LPCWSTR path = (LPCWSTR) (*env)->GetStringChars(env, j_path, NULL);
-  if (path == NULL) return; // JVM throws Exception for us
-  
-  if ((dwRtnCode = ChangeFileModeByMask((LPCWSTR) path, mode)) != ERROR_SUCCESS)
-  {
-    throw_ioe(env, dwRtnCode);
-  }
-
-  (*env)->ReleaseStringChars(env, j_path, (const jchar*) path);
-#endif
 }
 
 /*
@@ -425,7 +289,6 @@ JNIEXPORT void JNICALL Java_org_apache_h
  */
 static void throw_ioe(JNIEnv* env, int errnum)
 {
-#ifdef UNIX
   const char* message;
   char buffer[80];
   jstring jstr_message;
@@ -458,51 +321,9 @@ static void throw_ioe(JNIEnv* env, int e
 err:
   if (jstr_message != NULL)
     (*env)->ReleaseStringUTFChars(env, jstr_message, message);
-#endif
-
-#ifdef WINDOWS
-  DWORD len = 0;
-  LPWSTR buffer = NULL;
-  const jchar* message = NULL;
-  jstring jstr_message = NULL;
-  jthrowable obj = NULL;
-
-  len = FormatMessageW(
-    FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
-    NULL, *(DWORD*) (&errnum), // reinterpret cast
-    MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
-    (LPWSTR) &buffer, 0, NULL);
-
-  if (len > 0)
-  {
-    message = (const jchar*) buffer;
-  }
-  else
-  {
-    message = (const jchar*) L"Unknown error.";
-  }
-
-  if ((jstr_message = (*env)->NewString(env, message, len)) == NULL)
-    goto err;
-  LocalFree(buffer);
-  buffer = NULL; // Set buffer to NULL to avoid double free
-
-  obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
-    jstr_message, errnum);
-  if (obj == NULL) goto err;
-
-  (*env)->Throw(env, obj);
-  return;
-
-err:
-  if (jstr_message != NULL)
-    (*env)->ReleaseStringChars(env, jstr_message, message);
-  LocalFree(buffer);
-  return;
-#endif
 }
 
-#ifdef UNIX
+
 /*
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
@@ -512,98 +333,7 @@ ssize_t get_pw_buflen() {
   ret = sysconf(_SC_GETPW_R_SIZE_MAX);
   #endif
   return (ret > 512) ? ret : 512;
-
 }
-#endif
-
-/*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
- * Method:    getOwnerOnWindows
- * Signature: (Ljava/io/FileDescriptor;)Ljava/lang/String;
- */
-JNIEXPORT jstring JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getOwner
-  (JNIEnv *env, jclass clazz, jobject fd_object)
-{
-#ifdef UNIX
-  THROW(env, "java/io/IOException",
-    "The function Windows.getOwner() is not supported on Unix");
-  return NULL;
-#endif
-
-#ifdef WINDOWS
-  PSID pSidOwner = NULL;
-  PSECURITY_DESCRIPTOR pSD = NULL;
-  LPWSTR ownerName = (LPWSTR)NULL;
-  DWORD dwRtnCode = ERROR_SUCCESS;
-  jstring jstr_username = NULL;
-  HANDLE hFile = (HANDLE) fd_get(env, fd_object);
-  PASS_EXCEPTIONS_GOTO(env, cleanup);
-
-  dwRtnCode = GetSecurityInfo(
-    hFile,
-    SE_FILE_OBJECT,
-    OWNER_SECURITY_INFORMATION,
-    &pSidOwner,
-    NULL,
-    NULL,
-    NULL,
-    &pSD);
-  if (dwRtnCode != ERROR_SUCCESS) {
-    throw_ioe(env, dwRtnCode);
-    goto cleanup;
-  }
-
-  dwRtnCode = GetAccntNameFromSid(pSidOwner, &ownerName);
-  if (dwRtnCode != ERROR_SUCCESS) {
-    throw_ioe(env, dwRtnCode);
-    goto cleanup;
-  }
-
-  jstr_username = (*env)->NewString(env, ownerName, (jsize) wcslen(ownerName));
-  if (jstr_username == NULL) goto cleanup;
-
-cleanup:
-  LocalFree(ownerName);
-  LocalFree(pSD);
-  return jstr_username;
-#endif
-}
-
-/*
- * Class:     org_apache_hadoop_io_nativeio_NativeIO_Windows
- * Method:    setFilePointer
- * Signature: (Ljava/io/FileDescriptor;JJ)J
- */
-JNIEXPORT jlong JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_setFilePointer
-  (JNIEnv *env, jclass clazz, jobject fd_object, jlong distanceToMove, jlong moveMethod)
-{
-#ifdef UNIX
-  THROW(env, "java/io/IOException",
-    "The function setFilePointer(FileDescriptor) is not supported on Unix");
-  return NULL;
-#endif
-
-#ifdef WINDOWS
-  DWORD distanceToMoveLow = (DWORD) distanceToMove;
-  LONG distanceToMoveHigh = (LONG) (distanceToMove >> 32);
-  DWORD distanceMovedLow = 0;
-  HANDLE hFile = (HANDLE) fd_get(env, fd_object);
-  PASS_EXCEPTIONS_GOTO(env, cleanup);
-
-  distanceMovedLow = SetFilePointer(hFile,
-    distanceToMoveLow, &distanceToMoveHigh, (DWORD) moveMethod);
-
-  if (distanceMovedLow == INVALID_SET_FILE_POINTER) {
-     throw_ioe(env, GetLastError());
-     return -1;
-  }
-
-cleanup:
-
-  return ((jlong) distanceToMoveHigh << 32) | (jlong) distanceMovedLow;
-#endif
-}
-
 /**
  * vim: sw=2: ts=2: et:
  */

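Among the blocks reverted above is the Windows arm of throw_ioe(), which
formatted Win32 error codes with FormatMessageW; what remains is the
POSIX arm, which maps an errno value to a message before constructing a
NativeIOException. A hedged standalone sketch of that POSIX pattern;
plain strerror() stands in for the reentrant strerror_r() the real
function uses, and printing stands in for the JNI throw:

    #include <errno.h>
    #include <stdio.h>
    #include <string.h>

    /* Mirror of the errno-to-message step inside throw_ioe(); the real
     * code wraps the message in a NativeIOException and throws it
     * across JNI instead of printing. */
    static void report_ioe(int errnum) {
      fprintf(stderr, "would throw NativeIOException: %s (errno %d)\n",
              strerror(errnum), errnum);
    }

    int main(void) {
      report_ioe(ENOENT);   /* "No such file or directory" */
      report_ioe(EACCES);   /* "Permission denied" */
      return 0;
    }
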
Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Thu Oct 18 00:43:56 2012
@@ -26,10 +26,6 @@ static jfieldID fd_descriptor;
 // the no-argument constructor
 static jmethodID fd_constructor;
 
-#ifdef WINDOWS
-// the internal field for the long handle
-static jfieldID fd_handle;
-#endif
 
 void fd_init(JNIEnv* env)
 {
@@ -41,12 +37,6 @@ void fd_init(JNIEnv* env)
 
   fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
   PASS_EXCEPTIONS(env);
-
-#ifdef WINDOWS
-  fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
-  PASS_EXCEPTIONS(env);
-#endif
-
   fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
 }
 
@@ -56,13 +46,9 @@ void fd_deinit(JNIEnv *env) {
     fd_class = NULL;
   }
   fd_descriptor = NULL;
-#ifdef WINDOWS
-  fd_handle = NULL;
-#endif
   fd_constructor = NULL;
 }
 
-#ifdef UNIX
 /*
  * Given an instance 'obj' of java.io.FileDescriptor, return the
  * underlying fd, or throw if unavailable
@@ -81,25 +67,3 @@ jobject fd_create(JNIEnv *env, int fd) {
   (*env)->SetIntField(env, obj, fd_descriptor, fd);
   return obj;
 } 
-#endif
-
-#ifdef WINDOWS
-/*
- * Given an instance 'obj' of java.io.FileDescriptor, return the
- * underlying fd, or throw if unavailable
- */
-long fd_get(JNIEnv* env, jobject obj) {
-  return (long) (*env)->GetLongField(env, obj, fd_handle);
-}
-
-/*
- * Create a FileDescriptor object corresponding to the given int fd
- */
-jobject fd_create(JNIEnv *env, long fd) {
-  jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
-  PASS_EXCEPTIONS_RET(env, (jobject) NULL);
-
-  (*env)->SetLongField(env, obj, fd_handle, fd);
-  return obj;
-}
-#endif
\ No newline at end of file

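file_descriptor.c returns to the int-only accessors: fd_get() reads the
private "fd" field of java.io.FileDescriptor and fd_create() builds one
reflectively, with the Windows "handle" field dropped. A hedged
embedded-JVM sketch of the same reflective access; the build line is an
assumption (e.g. cc fd_demo.c -I$JAVA_HOME/include
-I$JAVA_HOME/include/linux -L$JAVA_HOME/lib/server -ljvm) and error
checks on the JNI lookups are trimmed for brevity:

    #include <jni.h>
    #include <stdio.h>

    int main(void) {
      JavaVM *jvm;
      JNIEnv *env;
      JavaVMInitArgs args = { JNI_VERSION_1_6, 0, NULL, JNI_FALSE };

      if (JNI_CreateJavaVM(&jvm, (void **) &env, &args) != JNI_OK)
        return 1;

      jclass fd_class = (*env)->FindClass(env, "java/io/FileDescriptor");
      jfieldID fd_descriptor =
          (*env)->GetFieldID(env, fd_class, "fd", "I");
      jmethodID fd_ctor =
          (*env)->GetMethodID(env, fd_class, "<init>", "()V");

      /* fd_create(): wrap raw fd 1 (stdout) in a FileDescriptor. */
      jobject obj = (*env)->NewObject(env, fd_class, fd_ctor);
      (*env)->SetIntField(env, obj, fd_descriptor, 1);

      /* fd_get(): read the raw fd back out of the object. */
      printf("wrapped fd = %d\n",
             (*env)->GetIntField(env, obj, fd_descriptor));

      (*jvm)->DestroyJavaVM(jvm);
      return 0;
    }
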
Modified: hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Thu Oct 18 00:43:56 2012
@@ -18,19 +18,11 @@
 #define FILE_DESCRIPTOR_H
 
 #include <jni.h>
-#include "org_apache_hadoop.h"
 
 void fd_init(JNIEnv *env);
 void fd_deinit(JNIEnv *env);
 
-#ifdef UNIX
 int fd_get(JNIEnv* env, jobject obj);
 jobject fd_create(JNIEnv *env, int fd);
-#endif
-
-#ifdef WINDOWS
-long fd_get(JNIEnv* env, jobject obj);
-jobject fd_create(JNIEnv *env, long fd);
-#endif
 
 #endif

Modified: hadoop/common/branches/branch-1-win/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/native/src/org_apache_hadoop.h?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/branches/branch-1-win/src/native/src/org_apache_hadoop.h Thu Oct 18 00:43:56 2012
@@ -24,12 +24,20 @@
 #if !defined ORG_APACHE_HADOOP_H
 #define ORG_APACHE_HADOOP_H
 
-#if defined(_WIN32)
-#undef UNIX
-#define WINDOWS
+#if defined HAVE_CONFIG_H
+  #include <config.h>
+#endif
+
+#if defined HAVE_DLFCN_H
+  #include <dlfcn.h>
+#else
+  #error "dlfcn.h not found"
+#endif  
+
+#if defined HAVE_JNI_H    
+  #include <jni.h>
 #else
-#undef WINDOWS
-#define UNIX
+  #error 'jni.h not found'
 #endif
 
 /* A helper macro to 'throw' a java exception. */ 
@@ -58,26 +66,6 @@
     if ((*env)->ExceptionCheck(env)) return (ret); \
   }
 
-/**
- * Unix definitions
- */
-#ifdef UNIX
-#if defined HAVE_CONFIG_H
-  #include <config.h>
-#endif
-
-#if defined HAVE_DLFCN_H
-  #include <dlfcn.h>
-#else
-  #error "dlfcn.h not found"
-#endif
-
-#if defined HAVE_JNI_H
-  #include <jni.h>
-#else
-  #error 'jni.h not found'
-#endif
-
 /** 
  * A helper function to dlsym a 'symbol' from a given library-handle. 
  * 
@@ -106,60 +94,6 @@ static void *do_dlsym(JNIEnv *env, void 
   if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
     return; \
   }
-#endif
-// Unix part end
-
-
-/**
- * Windows definitions
- */
-#ifdef WINDOWS
-
-/* Force using Unicode throughout the code */ 
-#ifndef UNICODE
-#define UNICODE
-#endif
-
-#include <Windows.h>
-#include <stdio.h>
-#include <jni.h>
-
-#define snprintf(a, b ,c, d) _snprintf_s((a), (b), _TRUNCATE, (c), (d))
-
-/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
-#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
-  if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
-    return; \
-  }
-
-/** 
- * A helper function to dynamically load a 'symbol' from a given library-handle.
- * 
- * @param env jni handle to report contingencies.
- * @param handle handle to the dynamic library.
- * @param symbol symbol to load.
- * @return returns the address where the symbol is loaded in memory, 
- *         <code>NULL</code> on error.
- */
-static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
-  DWORD dwErrorCode = ERROR_SUCCESS;
-  FARPROC func_ptr = NULL;
-
-  if (!env || !handle || !symbol) {
-    THROW(env, "java/lang/InternalError", NULL);
-    return NULL;
-  }
-
-  func_ptr = GetProcAddress(handle, symbol);
-  if (func_ptr == NULL)
-  {
-    THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
-  }
-  return func_ptr;
-}
-#endif
-// Windows part end
-
 
 #define LOCK_CLASS(env, clazz, classname) \
   if ((*env)->MonitorEnter(env, clazz) != 0) { \

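org_apache_hadoop.h returns to UNIX-only definitions: the feature-test
includes move back to the top and a single dlsym-based do_dlsym() helper
remains, the GetProcAddress variant for Windows having been removed. A
hedged standalone analogue of that helper; libz.so.1 and the zlibVersion
symbol are assumptions about the host, reasonable on most Linux systems.
Build with: cc dlsym_demo.c -ldl

    #include <dlfcn.h>
    #include <stdio.h>

    /* Analogue of do_dlsym(): resolve a symbol from a library handle,
     * reporting failure where the real helper throws
     * UnsatisfiedLinkError through JNI. */
    static void *do_dlsym_demo(void *handle, const char *symbol) {
      void *func = dlsym(handle, symbol);
      if (func == NULL)
        fprintf(stderr, "failed to resolve %s: %s\n", symbol, dlerror());
      return func;
    }

    int main(void) {
      void *handle = dlopen("libz.so.1", RTLD_LAZY | RTLD_GLOBAL);
      if (handle == NULL) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
      }

      const char *(*version)(void) =
          (const char *(*)(void)) do_dlsym_demo(handle, "zlibVersion");
      if (version != NULL)
        printf("zlibVersion() = %s\n", version());

      dlclose(handle);
      return 0;
    }
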
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/hdfs/TestFileConcurrentReader.java Thu Oct 18 00:43:56 2012
@@ -198,7 +198,8 @@ public class TestFileConcurrentReader ex
     runTestUnfinishedBlockCRCError(true, SyncType.SYNC, SMALL_WRITE_SIZE);
   }
 
-  public void testUnfinishedBlockCRCErrorTransferToAppend() throws IOException {
+  // fails due to an issue with append; disabled
+  public void _testUnfinishedBlockCRCErrorTransferToAppend() throws IOException {
     runTestUnfinishedBlockCRCError(true, SyncType.APPEND, DEFAULT_WRITE_SIZE);
   }
   
@@ -210,8 +211,9 @@ public class TestFileConcurrentReader ex
     throws IOException {
     runTestUnfinishedBlockCRCError(false, SyncType.SYNC, SMALL_WRITE_SIZE);
   }
-   
-  public void testUnfinishedBlockCRCErrorNormalTransferAppend() 
+  
+  // fails due to an issue with append; disabled
+  public void _testUnfinishedBlockCRCErrorNormalTransferAppend() 
     throws IOException {
     runTestUnfinishedBlockCRCError(false, SyncType.APPEND, DEFAULT_WRITE_SIZE);
   }
@@ -240,35 +242,34 @@ public class TestFileConcurrentReader ex
       final AtomicBoolean writerDone = new AtomicBoolean(false);
       final AtomicBoolean writerStarted = new AtomicBoolean(false);
       final AtomicBoolean error = new AtomicBoolean(false);
-
+      final FSDataOutputStream initialOutputStream = fileSystem.create(file);
       Thread writer = new Thread(new Runnable() {
+        private FSDataOutputStream outputStream = initialOutputStream;
+
         @Override
         public void run() {
           try {
-            FSDataOutputStream outputStream = fileSystem.create(file);
-            if (syncType == SyncType.APPEND) {
-              outputStream.close();
-              outputStream = fileSystem.append(file);
-            }
-            try {
-              for (int i = 0; !error.get() && i < numWrites; i++) {
-                final byte[] writeBuf = generateSequentialBytes(i * writeSize,
-                    writeSize);
+            for (int i = 0; !error.get() && i < numWrites; i++) {
+              try {
+                final byte[] writeBuf = 
+                  generateSequentialBytes(i * writeSize, writeSize);              
                 outputStream.write(writeBuf);
                 if (syncType == SyncType.SYNC) {
                   outputStream.sync();
+                } else { // append
+                  outputStream.close();
+                  outputStream = fileSystem.append(file);
                 }
                 writerStarted.set(true);
-              }
-            } catch (IOException e) {
-              error.set(true);
-              LOG.error(String.format("error writing to file"));
-            } finally {
-              outputStream.close();
+              } catch (IOException e) {
+                error.set(true);
+                LOG.error(String.format("error writing to file"));
+              } 
             }
+            
+            outputStream.close();
             writerDone.set(true);
           } catch (Exception e) {
-            error.set(true);
             LOG.error("error in writer", e);
             
             throw new RuntimeException(e);
@@ -317,6 +318,7 @@ public class TestFileConcurrentReader ex
         
         Thread.currentThread().interrupt();
       }
+      initialOutputStream.close();
     } finally {
       cluster.shutdown();
     }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1399496&r1=1399495&r2=1399496&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/io/nativeio/TestNativeIO.java Thu Oct 18 00:43:56 2012
@@ -17,16 +17,10 @@
  */
 package org.apache.hadoop.io.nativeio;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileDescriptor;
-import java.io.FileInputStream;
 import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.InputStreamReader;
-
 import org.junit.Before;
 import org.junit.Test;
 import static org.junit.Assume.*;
@@ -40,7 +34,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.apache.hadoop.util.Shell;
 
 public class TestNativeIO {
   static final Log LOG = LogFactory.getLog(TestNativeIO.class);
@@ -61,155 +54,55 @@ public class TestNativeIO {
 
   @Test
   public void testFstat() throws Exception {
-    if (Shell.WINDOWS)
-      return;
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
-    NativeIO.POSIX.Stat stat = NativeIO.POSIX.fstat(fos.getFD());
+    NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
     fos.close();
     LOG.info("Stat: " + String.valueOf(stat));
 
     assertEquals(System.getProperty("user.name"), stat.getOwner());
-    assertEquals(NativeIO.POSIX.Stat.S_IFREG, stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
+    assertEquals(NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
   }
   
   @Test
   public void testGetOwner() throws Exception {
-    // get the user name
-    String username = System.getProperty("user.name");
-    File testFile = new File(TEST_DIR, "testfstat");
-    FileOutputStream fos = new FileOutputStream(testFile);
+    FileOutputStream fos = new FileOutputStream(
+      new File(TEST_DIR, "testfstat"));
+    String owner = NativeIO.getOwner(fos.getFD());
     fos.close();
-    FileUtil.setOwner(testFile, username, null);
-    FileInputStream fis = new FileInputStream(testFile);
-    String owner = NativeIO.getOwner(fis.getFD());
-    fis.close();
     LOG.info("Owner: " + owner);
-    // On Windows, the user names are case insensitive. We do not 
-    // take cases into consideration during user name comparison.
-    if (Shell.WINDOWS)
-      assertEquals(username.toLowerCase(), owner.toLowerCase());
-    else
-      assertEquals(username, owner);
+
+    assertEquals(System.getProperty("user.name"), owner);
   }
 
   @Test
   public void testFstatClosedFd() throws Exception {
-    if (Shell.WINDOWS)
-      return;
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat2"));
     fos.close();
     try {
-      NativeIO.POSIX.Stat stat = NativeIO.POSIX.fstat(fos.getFD());
+      NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
     } catch (IOException e) {
       LOG.info("Got expected exception", e);
     }
   }
-  
-  @Test
-  public void testSetFilePointer() throws Exception {
-    if (!Shell.WINDOWS)
-      return;
-    LOG.info("Set a file pointer on Windows");
-    try {
-      File testfile = new File(TEST_DIR, "testSetFilePointer");
-      assertTrue("Create test subject",
-          testfile.exists() || testfile.createNewFile());
-      FileWriter writer = new FileWriter(testfile);
-      try {
-        for (int i = 0; i < 200; i++)
-          if (i < 100)
-            writer.write('a');
-          else
-            writer.write('b');
-        writer.flush();
-      } catch (Exception writerException) {
-        fail("Got unexpected exception: " + writerException.getMessage());
-      } finally {
-        writer.close();
-      }
-      
-      FileDescriptor fd = NativeIO.Windows.createFile(
-          testfile.getCanonicalPath(),
-          NativeIO.Windows.GENERIC_READ,
-          NativeIO.Windows.FILE_SHARE_READ |
-          NativeIO.Windows.FILE_SHARE_WRITE |
-          NativeIO.Windows.FILE_SHARE_DELETE,
-          NativeIO.Windows.OPEN_EXISTING);
-      NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
-      FileReader reader = new FileReader(fd);
-      try {
-        int c = reader.read();
-        assertTrue("Unexpected character: " + c, c == 'b');
-      } catch (Exception readerException) {
-        fail("Got unexpected exception: " + readerException.getMessage());
-      } finally {
-        reader.close();
-      }
-    } catch (Exception e) {
-      fail("Got unexpected exception: " + e.getMessage());
-    }
-  }
-  
-  @Test
-  public void testCreateFile() throws Exception {
-    if (!Shell.WINDOWS)
-      return;
-    LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
-    try {
-      File testfile = new File(TEST_DIR, "testCreateFile");
-      assertTrue("Create test subject",
-          testfile.exists() || testfile.createNewFile());
-      
-      FileDescriptor fd = NativeIO.Windows.createFile(
-          testfile.getCanonicalPath(),
-          NativeIO.Windows.GENERIC_READ,
-          NativeIO.Windows.FILE_SHARE_READ |
-          NativeIO.Windows.FILE_SHARE_WRITE |
-          NativeIO.Windows.FILE_SHARE_DELETE,
-          NativeIO.Windows.OPEN_EXISTING);
-      
-      FileInputStream fin = new FileInputStream(fd);
-      try {
-        fin.read();
-
-        File newfile = new File(TEST_DIR, "testRenamedFile");
-
-        boolean renamed = testfile.renameTo(newfile);
-        assertTrue("Rename failed.", renamed);
-
-        fin.read();
-      } catch (Exception e) {
-        fail("Got unexpected exception: " + e.getMessage());
-      }
-      finally {
-        fin.close();
-      }
-    } catch (Exception e) {
-      fail("Got unexpected exception: " + e.getMessage());
-    }
-
-  }
 
   @Test
   public void testOpen() throws Exception {
-    if (Shell.WINDOWS)
-      return;
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
-      FileDescriptor fd = NativeIO.POSIX.open(
+      FileDescriptor fd = NativeIO.open(
         new File(TEST_DIR, "doesntexist").getAbsolutePath(),
-        NativeIO.POSIX.O_WRONLY, 0700);
+        NativeIO.O_WRONLY, 0700);
       fail("Able to open a new file without O_CREAT");
     } catch (IOException ioe) {
       // expected
     }
 
     LOG.info("Test creating a file with O_CREAT");
-    FileDescriptor fd = NativeIO.POSIX.open(
+    FileDescriptor fd = NativeIO.open(
       new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-      NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
+      NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
     assertNotNull(true);
     assertTrue(fd.valid());
     FileOutputStream fos = new FileOutputStream(fd);
@@ -220,9 +113,9 @@ public class TestNativeIO {
 
     LOG.info("Test exclusive create");
     try {
-      fd = NativeIO.POSIX.open(
+      fd = NativeIO.open(
         new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT | NativeIO.POSIX.O_EXCL, 0700);
+        NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
       fail("Was able to create existing file with O_EXCL");
     } catch (IOException ioe) {
       // expected
@@ -235,12 +128,10 @@ public class TestNativeIO {
    */
   @Test
   public void testFDDoesntLeak() throws IOException {
-    if (Shell.WINDOWS)
-      return;
     for (int i = 0; i < 10000; i++) {
-      FileDescriptor fd = NativeIO.POSIX.open(
+      FileDescriptor fd = NativeIO.open(
         new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
-        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
+        NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
       assertNotNull(true);
       assertTrue(fd.valid());
       FileOutputStream fos = new FileOutputStream(fd);
@@ -255,7 +146,7 @@ public class TestNativeIO {
   @Test
   public void testChmod() throws Exception {
     try {
-      NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
+      NativeIO.chmod("/this/file/doesnt/exist", 777);
       fail("Chmod of non-existent file didn't fail");
     } catch (NativeIOException nioe) {
       assertEquals(Errno.ENOENT, nioe.getErrno());
@@ -264,11 +155,11 @@ public class TestNativeIO {
     File toChmod = new File(TEST_DIR, "testChmod");
     assertTrue("Create test subject",
                toChmod.exists() || toChmod.mkdir());
-    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
+    NativeIO.chmod(toChmod.getAbsolutePath(), 0777);
     assertPermissions(toChmod, 0777);
-    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
+    NativeIO.chmod(toChmod.getAbsolutePath(), 0000);
     assertPermissions(toChmod, 0000);
-    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0644);
+    NativeIO.chmod(toChmod.getAbsolutePath(), 0644);
     assertPermissions(toChmod, 0644);
   }
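
The test changes above exercise NativeIO.open(), NativeIO.fstat() and
NativeIO.chmod(), which on the POSIX side are thin JNI wrappers over the
like-named syscalls. A hedged standalone sketch of those underlying
calls; the path is illustrative, while the flags and the 0700/0644 modes
mirror the test:

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/stat.h>
    #include <unistd.h>

    int main(void) {
      const char *path = "/tmp/test-nativeio-demo";

      /* NativeIO.open(path, O_WRONLY | O_CREAT, 0700) */
      int fd = open(path, O_WRONLY | O_CREAT, 0700);
      if (fd < 0) { perror("open"); return 1; }

      /* NativeIO.fstat(fd): permission bits and owner uid */
      struct stat st;
      if (fstat(fd, &st) == 0)
        printf("mode: %o, uid: %d\n",
               (unsigned) (st.st_mode & 07777), (int) st.st_uid);

      /* NativeIO.chmod(path, 0644) */
      if (chmod(path, 0644) != 0)
        perror("chmod");

      close(fd);
      unlink(path);
      return 0;
    }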