Posted to common-commits@hadoop.apache.org by wa...@apache.org on 2013/08/15 02:15:13 UTC

svn commit: r1514105 [1/2] - in /hadoop/common/branches/HDFS-4949/hadoop-common-project: ./ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/ hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/ hadoop-a...

Author: wang
Date: Thu Aug 15 00:15:11 2013
New Revision: 1514105

URL: http://svn.apache.org/r1514105
Log:
merge trunk into HDFS-4949 branch

Added:
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestStat.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/   (props changed)
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/pom.xml
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/pom.xml
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytab.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytab.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytabEncoder.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/directory/server/kerberos/shared/keytab/HackedKeytabEncoder.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/KerberosSecurityTestcase.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/KerberosSecurityTestcase.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/resources/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/resources/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/resources/log4j.properties
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/resources/log4j.properties
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc-krb5.conf
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/main/resources/minikdc.ldiff
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/org/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/org/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/
      - copied from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
      - copied unchanged from r1514104, hadoop/common/trunk/hadoop-common-project/hadoop-minikdc/src/test/java/org/apache/hadoop/minikdc/TestMiniKdc.java
Removed:
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
Modified:
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/FileSystemShell.apt.vm
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/SingleCluster.apt.vm
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHarFileSystemBasics.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java
    hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml
    hadoop/common/branches/HDFS-4949/hadoop-common-project/pom.xml

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/client/KerberosAuthenticator.java Thu Aug 15 00:15:11 2013
@@ -37,6 +37,8 @@ import java.security.PrivilegedException
 import java.util.HashMap;
 import java.util.Map;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 /**
  * The {@link KerberosAuthenticator} implements the Kerberos SPNEGO authentication sequence.
  * <p/>
@@ -75,15 +77,31 @@ public class KerberosAuthenticator imple
 
     private static final String OS_LOGIN_MODULE_NAME;
     private static final boolean windows = System.getProperty("os.name").startsWith("Windows");
+    private static final boolean is64Bit = System.getProperty("os.arch").contains("64");
+    private static final boolean aix = System.getProperty("os.name").equals("AIX");
 
-    static {
-      if (windows) {
-        OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.NTLoginModule";
+    /* Return the OS login module class name */
+    private static String getOSLoginModuleName() {
+      if (IBM_JAVA) {
+        if (windows) {
+          return is64Bit ? "com.ibm.security.auth.module.Win64LoginModule"
+              : "com.ibm.security.auth.module.NTLoginModule";
+        } else if (aix) {
+          return is64Bit ? "com.ibm.security.auth.module.AIX64LoginModule"
+              : "com.ibm.security.auth.module.AIXLoginModule";
+        } else {
+          return "com.ibm.security.auth.module.LinuxLoginModule";
+        }
       } else {
-        OS_LOGIN_MODULE_NAME = "com.sun.security.auth.module.UnixLoginModule";
+        return windows ? "com.sun.security.auth.module.NTLoginModule"
+            : "com.sun.security.auth.module.UnixLoginModule";
       }
     }
 
+    static {
+      OS_LOGIN_MODULE_NAME = getOSLoginModuleName();
+    }
+
     private static final AppConfigurationEntry OS_SPECIFIC_LOGIN =
       new AppConfigurationEntry(OS_LOGIN_MODULE_NAME,
                                 AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
@@ -92,13 +110,22 @@ public class KerberosAuthenticator imple
     private static final Map<String, String> USER_KERBEROS_OPTIONS = new HashMap<String, String>();
 
     static {
-      USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
-      USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
-      USER_KERBEROS_OPTIONS.put("renewTGT", "true");
       String ticketCache = System.getenv("KRB5CCNAME");
+      if (IBM_JAVA) {
+        USER_KERBEROS_OPTIONS.put("useDefaultCcache", "true");
+      } else {
+        USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
+        USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
+      }
       if (ticketCache != null) {
-        USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+        if (IBM_JAVA) {
+          // The first value searched when "useDefaultCcache" is used.
+          System.setProperty("KRB5CCNAME", ticketCache);
+        } else {
+          USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
+        }
       }
+      USER_KERBEROS_OPTIONS.put("renewTGT", "true");
     }
 
     private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
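
For context: the IBM_JAVA constant imported above comes from the PlatformName class this merge copies into hadoop-auth. Judging from the java.vendor check the same patch removes from KerberosUtil further down, the flag presumably reduces to a vendor test; a minimal sketch under that assumption:

    // Hypothetical reconstruction -- the real PlatformName.java copied from
    // trunk may carry more than this single flag.
    public class PlatformName {
      public static final String JAVA_VENDOR_NAME =
          System.getProperty("java.vendor");
      public static final boolean IBM_JAVA = JAVA_VENDOR_NAME.contains("IBM");
    }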

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java Thu Aug 15 00:15:11 2013
@@ -21,6 +21,7 @@ import org.apache.hadoop.security.authen
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
 import org.ietf.jgss.GSSManager;
+import org.ietf.jgss.Oid;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -44,6 +45,8 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 /**
  * The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO authentication mechanism for HTTP.
  * <p/>
@@ -77,18 +80,33 @@ public class KerberosAuthenticationHandl
     @Override
     public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
       Map<String, String> options = new HashMap<String, String>();
-      options.put("keyTab", keytab);
-      options.put("principal", principal);
-      options.put("useKeyTab", "true");
-      options.put("storeKey", "true");
-      options.put("doNotPrompt", "true");
-      options.put("useTicketCache", "true");
-      options.put("renewTGT", "true");
+      if (IBM_JAVA) {
+        options.put("useKeytab",
+            keytab.startsWith("file://") ? keytab : "file://" + keytab);
+        options.put("principal", principal);
+        options.put("credsType", "acceptor");
+      } else {
+        options.put("keyTab", keytab);
+        options.put("principal", principal);
+        options.put("useKeyTab", "true");
+        options.put("storeKey", "true");
+        options.put("doNotPrompt", "true");
+        options.put("useTicketCache", "true");
+        options.put("renewTGT", "true");
+        options.put("isInitiator", "false");
+      }
       options.put("refreshKrb5Config", "true");
-      options.put("isInitiator", "false");
       String ticketCache = System.getenv("KRB5CCNAME");
       if (ticketCache != null) {
-        options.put("ticketCache", ticketCache);
+        if (IBM_JAVA) {
+          options.put("useDefaultCcache", "true");
+          // The first value searched when "useDefaultCcache" is used.
+          System.setProperty("KRB5CCNAME", ticketCache);
+          options.put("renewTGT", "true");
+          options.put("credsType", "both");
+        } else {
+          options.put("ticketCache", ticketCache);
+        }
       }
       if (LOG.isDebugEnabled()) {
         options.put("debug", "true");
@@ -294,8 +312,18 @@ public class KerberosAuthenticationHandl
           public AuthenticationToken run() throws Exception {
             AuthenticationToken token = null;
             GSSContext gssContext = null;
+            GSSCredential gssCreds = null;
             try {
-              gssContext = gssManager.createContext((GSSCredential) null);
+              if (IBM_JAVA) {
+                // IBM JDK needs non-null credentials to be passed to createContext here, with
+                // SPNEGO mechanism specified, otherwise JGSS will use its default mechanism
+                // only, which is Kerberos V5.
+                gssCreds = gssManager.createCredential(null, GSSCredential.INDEFINITE_LIFETIME,
+                    new Oid[]{KerberosUtil.getOidInstance("GSS_SPNEGO_MECH_OID"),
+                        KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID")},
+                    GSSCredential.ACCEPT_ONLY);
+              }
+              gssContext = gssManager.createContext(gssCreds);
               byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
               if (serverToken != null && serverToken.length > 0) {
                 String authenticate = base64.encodeToString(serverToken);
@@ -317,6 +345,9 @@ public class KerberosAuthenticationHandl
               if (gssContext != null) {
                 gssContext.dispose();
               }
+              if (gssCreds != null) {
+                gssCreds.dispose();
+              }
             }
             return token;
           }
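
The options maps assembled in getAppConfigurationEntry above are ultimately handed to JAAS as an AppConfigurationEntry. A minimal sketch of that wiring, with an illustrative keytab path and principal that are not taken from the patch:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.login.AppConfigurationEntry;

    public class JaasSketch {
      public static void main(String[] args) {
        Map<String, String> options = new HashMap<String, String>();
        options.put("keyTab", "/etc/security/http.keytab");  // illustrative path
        options.put("principal", "HTTP/host@EXAMPLE.COM");   // illustrative principal
        options.put("useKeyTab", "true");
        options.put("storeKey", "true");
        options.put("isInitiator", "false"); // acceptor-only, as in the Sun branch
        AppConfigurationEntry entry = new AppConfigurationEntry(
            "com.sun.security.auth.module.Krb5LoginModule",
            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
            options);
        System.out.println(entry.getLoginModuleName());
      }
    }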

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java Thu Aug 15 00:15:11 2013
@@ -27,6 +27,8 @@ import java.util.Locale;
 import org.ietf.jgss.GSSException;
 import org.ietf.jgss.Oid;
 
+import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
+
 public class KerberosUtil {
 
   /* Return the Kerberos login module name */
@@ -40,7 +42,11 @@ public class KerberosUtil {
       throws ClassNotFoundException, GSSException, NoSuchFieldException,
       IllegalAccessException {
     Class<?> oidClass;
-    if (System.getProperty("java.vendor").contains("IBM")) {
+    if (IBM_JAVA) {
+      if ("NT_GSS_KRB5_PRINCIPAL".equals(oidName)) {
+        // The IBM JDK GSSUtil class does not have a field for the krb5 principal OID
+        return new Oid("1.2.840.113554.1.2.2.1");
+      }
       oidClass = Class.forName("com.ibm.security.jgss.GSSUtil");
     } else {
       oidClass = Class.forName("sun.security.jgss.GSSUtil");
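
The dotted string in the IBM fallback above is the standard Kerberos V5 principal-name OID, built directly rather than reflected out of a vendor class. Constructing an org.ietf.jgss.Oid that way is a one-liner (it can throw GSSException on a malformed string):

    import org.ietf.jgss.GSSException;
    import org.ietf.jgss.Oid;

    public class OidSketch {
      public static void main(String[] args) throws GSSException {
        // Same OID the patch returns for NT_GSS_KRB5_PRINCIPAL on IBM JDKs.
        Oid krb5Principal = new Oid("1.2.840.113554.1.2.2.1");
        System.out.println(krb5Principal);
      }
    }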

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt Thu Aug 15 00:15:11 2013
@@ -56,9 +56,6 @@ Trunk (Unreleased)
     HADOOP-8719. Workaround for kerberos-related log errors upon running any
     hadoop command on OSX. (Jianbin Wei via harsh)
 
-    HADOOP-8814. Replace string equals "" by String#isEmpty().
-    (Brandon Li via suresh)
-
     HADOOP-8588. SerializationFactory shouldn't throw a
     NullPointerException if the serializations list is empty.
     (Sho Shimauchi via harsh)
@@ -271,6 +268,15 @@ Trunk (Unreleased)
     HADOOP-9433 TestLocalFileSystem#testHasFileDescriptor leaks file handle
     (Chris Nauroth via sanjay)
 
+    HADOOP-9583. test-patch gives +1 despite build failure when running tests.
+    (jlowe via kihwal)
+
+    HADOOP-9847. TestGlobPath symlink tests fail to cleanup properly.
+    (cmccabe via wang)
+
+    HADOOP-9740. Fix FsShell '-text' command to be able to read Avro
+    files stored in HDFS and other filesystems. (Allan Yan via cutting)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -285,6 +291,9 @@ Release 2.3.0 - UNRELEASED
 
   IMPROVEMENTS
 
+    HADOOP-9871. Fix intermittent findbugs warnings in DefaultMetricsSystem.
+    (Junping Du via llu)
+
     HADOOP-9319. Update bundled LZ4 source to r99. (Binglin Chang via llu)
 
     HADOOP-9241. DU refresh interval is not configurable (harsh)
@@ -304,6 +313,9 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9758.  Provide configuration option for FileSystem/FileContext
     symlink resolution.  (Andrew Wang via Colin Patrick McCabe)
 
+    HADOOP-9848. Create a MiniKDC for use with security testing. 
+    (ywskycn via tucu)
+
   OPTIMIZATIONS
 
     HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
@@ -319,6 +331,10 @@ Release 2.3.0 - UNRELEASED
     HADOOP-9817. FileSystem#globStatus and FileContext#globStatus need to work
     with symlinks. (Colin Patrick McCabe via Andrew Wang)
 
+    HADOOP-9652.  RawLocalFs#getFileLinkStatus does not fill in the link owner
+    and mode.  (Andrew Wang via Colin Patrick McCabe)
+
+
 Release 2.1.1-beta - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -326,6 +342,8 @@ Release 2.1.1-beta - UNRELEASED
   NEW FEATURES
 
   IMPROVEMENTS
+
+    HADOOP-9446. Support Kerberos SPNEGO for IBM JDK. (Yu Gao via llu)
  
     HADOOP-9787. ShutdownHelper util to shutdown threads and threadpools.
     (Karthik Kambatla via Sandy Ryza)
@@ -340,6 +358,11 @@ Release 2.1.1-beta - UNRELEASED
 
     HADOOP-9789. Support server advertised kerberos principals (daryn)
 
+    HADOOP-8814. Replace string equals "" by String#isEmpty().
+    (Brandon Li via suresh)
+
+    HADOOP-9802. Support Snappy codec on Windows. (cnauroth)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -361,6 +384,13 @@ Release 2.1.1-beta - UNRELEASED
     HADOOP-9675. use svn:eol-style native for html to prevent line ending
     issues (Colin Patrick McCabe)
 
+    HADOOP-9757. Har metadata cache can grow without limit (Cristina Abad via daryn)
+
+    HADOOP-9857. Tests block and sometimes timeout on Windows due to invalid
+    entropy source. (cnauroth)
+
+    HADOOP-9381. Document dfs cp -f option. (Keegan Witt, suresh via suresh)
+
 Release 2.1.0-beta - 2013-08-06
 
   INCOMPATIBLE CHANGES
@@ -558,6 +588,10 @@ Release 2.1.0-beta - 2013-08-06
     HADOOP-9150. Avoid unnecessary DNS resolution attempts for logical URIs
     (todd)
 
+    HADOOP-9845. Update protobuf to 2.5 from 2.4.x. (tucu)
+
+    HADOOP-9872. Improve protoc version handling and detection. (tucu)
+
   BUG FIXES
 
     HADOOP-9294. GetGroupsTestBase fails on Windows. (Chris Nauroth via suresh)

Propchange: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1512448-1514104

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/pom.xml?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/pom.xml Thu Aug 15 00:15:11 2013
@@ -308,6 +308,8 @@
               <goal>protoc</goal>
             </goals>
             <configuration>
+              <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
               <imports>
                 <param>${basedir}/src/main/proto</param>
               </imports>
@@ -336,6 +338,8 @@
               <goal>protoc</goal>
             </goals>
             <configuration>
+              <protocVersion>${protobuf.version}</protocVersion>
+              <protocCommand>${protoc.path}</protocCommand>
               <imports>
                 <param>${basedir}/src/test/proto</param>
               </imports>
@@ -586,6 +590,13 @@
           <family>Windows</family>
         </os>
       </activation>
+      <properties>
+        <snappy.prefix></snappy.prefix>
+        <snappy.lib></snappy.lib>
+        <snappy.include></snappy.include>
+        <require.snappy>false</require.snappy>
+        <bundle.snappy.in.bin>true</bundle.snappy.in.bin>
+      </properties>
       <build>
         <plugins>
           <plugin>
@@ -670,6 +681,10 @@
                     <argument>/nologo</argument>
                     <argument>/p:Configuration=Release</argument>
                     <argument>/p:OutDir=${project.build.directory}/bin/</argument>
+                    <argument>/p:CustomSnappyPrefix=${snappy.prefix}</argument>
+                    <argument>/p:CustomSnappyLib=${snappy.lib}</argument>
+                    <argument>/p:CustomSnappyInclude=${snappy.include}</argument>
+                    <argument>/p:RequireSnappy=${require.snappy}</argument>
                   </arguments>
                 </configuration>
               </execution>

Propchange: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1512448-1514104

Propchange: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1512448-1514104

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java Thu Aug 15 00:15:11 2013
@@ -113,7 +113,14 @@ public abstract class DelegateToFileSyst
 
   @Override
   public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    return getFileStatus(f);
+    FileStatus status = fsImpl.getFileLinkStatus(f);
+    // FileSystem#getFileLinkStatus qualifies the link target
+    // AbstractFileSystem needs to return it plain since it's qualified
+    // in FileContext, so re-get and set the plain target
+    if (status.isSymlink()) {
+      status.setSymlink(fsImpl.getLinkTarget(f));
+    }
+    return status;
   }
 
   @Override
@@ -199,22 +206,18 @@ public abstract class DelegateToFileSyst
 
   @Override
   public boolean supportsSymlinks() {
-    return false;
+    return fsImpl.supportsSymlinks();
   }  
   
   @Override
   public void createSymlink(Path target, Path link, boolean createParent) 
       throws IOException { 
-    throw new IOException("File system does not support symlinks");
+    fsImpl.createSymlink(target, link, createParent);
   } 
   
   @Override
   public Path getLinkTarget(final Path f) throws IOException {
-    /* We should never get here. Any file system that threw an 
-     * UnresolvedLinkException, causing this function to be called,
-     * should override getLinkTarget. 
-     */
-    throw new AssertionError();
+    return fsImpl.getLinkTarget(f);
   }
 
   @Override //AbstractFileSystem
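
With the delegation above in place, symlink operations issued through FileContext against the local AbstractFileSystem now flow down to RawLocalFileSystem instead of throwing. A small usage sketch (paths are illustrative):

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.Path;

    public class SymlinkSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getLocalFSFileContext();
        fc.createSymlink(new Path("/tmp/target"), new Path("/tmp/link"), false);
        // Resolves via DelegateToFileSystem#getFileLinkStatus above.
        FileStatus st = fc.getFileLinkStatus(new Path("/tmp/link"));
        System.out.println(st.isSymlink() + " -> " + st.getSymlink());
      }
    }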

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java Thu Aug 15 00:15:11 2013
@@ -24,11 +24,12 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.HashMap;
-import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -56,10 +57,12 @@ public class HarFileSystem extends Filte
 
   private static final Log LOG = LogFactory.getLog(HarFileSystem.class);
 
+  public static final String METADATA_CACHE_ENTRIES_KEY = "fs.har.metadatacache.entries";
+  public static final int METADATA_CACHE_ENTRIES_DEFAULT = 10;
+
   public static final int VERSION = 3;
 
-  private static final Map<URI, HarMetaData> harMetaCache =
-      new ConcurrentHashMap<URI, HarMetaData>();
+  private static Map<URI, HarMetaData> harMetaCache;
 
   // uri representation of this Har filesystem
   private URI uri;
@@ -98,7 +101,14 @@ public class HarFileSystem extends Filte
   public HarFileSystem(FileSystem fs) {
     super(fs);
   }
-  
+ 
+  private synchronized void initializeMetadataCache(Configuration conf) {
+    if (harMetaCache == null) {
+      int cacheSize = conf.getInt(METADATA_CACHE_ENTRIES_KEY, METADATA_CACHE_ENTRIES_DEFAULT);
+      harMetaCache = Collections.synchronizedMap(new LruCache<URI, HarMetaData>(cacheSize));
+    }
+  }
+ 
   /**
    * Initialize a Har filesystem per har archive. The 
    * archive home directory is the top level directory
@@ -114,6 +124,9 @@ public class HarFileSystem extends Filte
    */
   @Override
   public void initialize(URI name, Configuration conf) throws IOException {
+    // initialize the metadata cache, if needed
+    initializeMetadataCache(conf);
+
     // decode the name
     URI underLyingURI = decodeHarURI(name, conf);
     // we got the right har Path- now check if this is 
@@ -1117,4 +1130,18 @@ public class HarFileSystem extends Filte
   HarMetaData getMetadata() {
     return metadata;
   }
+
+  private static class LruCache<K, V> extends LinkedHashMap<K, V> {
+    private final int MAX_ENTRIES;
+
+    public LruCache(int maxEntries) {
+        super(maxEntries + 1, 1.0f, true);
+        MAX_ENTRIES = maxEntries;
+    }
+
+    @Override
+    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
+        return size() > MAX_ENTRIES;
+    }
+  }
 }
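
The access-ordered LinkedHashMap idiom behind the new LruCache is easy to demonstrate in isolation; a toy two-entry cache (sizes illustrative, not the 10-entry default above):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LruSketch {
      public static void main(String[] args) {
        Map<String, String> lru = new LinkedHashMap<String, String>(3, 1.0f, true) {
          @Override
          protected boolean removeEldestEntry(Map.Entry<String, String> eldest) {
            return size() > 2; // same rule as LruCache, with MAX_ENTRIES = 2
          }
        };
        lru.put("a", "1");
        lru.put("b", "2");
        lru.get("a");      // touching "a" leaves "b" as the eldest entry
        lru.put("c", "3"); // evicts "b"
        System.out.println(lru.keySet()); // prints [a, c]
      }
    }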

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java Thu Aug 15 00:15:11 2013
@@ -41,15 +41,6 @@ import org.apache.hadoop.util.Shell;
  */
 public class HardLink { 
 
-  public enum OSType {
-    OS_TYPE_UNIX,
-    OS_TYPE_WIN,
-    OS_TYPE_SOLARIS,
-    OS_TYPE_MAC,
-    OS_TYPE_FREEBSD
-  }
-  
-  public static OSType osType;
   private static HardLinkCommandGetter getHardLinkCommand;
   
   public final LinkStats linkStats; //not static
@@ -57,19 +48,18 @@ public class HardLink { 
   //initialize the command "getters" statically, so can use their 
   //methods without instantiating the HardLink object
   static { 
-    osType = getOSType();
-    if (osType == OSType.OS_TYPE_WIN) {
+    if (Shell.WINDOWS) {
       // Windows
       getHardLinkCommand = new HardLinkCGWin();
     } else {
-      // Unix
+      // Unix or Linux
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
+      if (Shell.MAC || Shell.FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
-      } else if (osType == OSType.OS_TYPE_SOLARIS) {
+      } else if (Shell.SOLARIS) {
         String[] linkCountCmdTemplate = {"ls","-l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);        
       }
@@ -80,26 +70,6 @@ public class HardLink { 
     linkStats = new LinkStats();
   }
   
-  static private OSType getOSType() {
-    String osName = System.getProperty("os.name");
-    if (Shell.WINDOWS) {
-      return OSType.OS_TYPE_WIN;
-    }
-    else if (osName.contains("SunOS") 
-            || osName.contains("Solaris")) {
-       return OSType.OS_TYPE_SOLARIS;
-    }
-    else if (osName.contains("Mac")) {
-       return OSType.OS_TYPE_MAC;
-    }
-    else if (osName.contains("FreeBSD")) {
-       return OSType.OS_TYPE_FREEBSD;
-    }
-    else {
-      return OSType.OS_TYPE_UNIX;
-    }
-  }
-  
   /**
    * This abstract class bridges the OS-dependent implementations of the 
    * needed functionality for creating hardlinks and querying link counts.
@@ -548,7 +518,7 @@ public class HardLink { 
       if (inpMsg == null || exitValue != 0) {
         throw createIOException(fileName, inpMsg, errMsg, exitValue, null);
       }
-      if (osType == OSType.OS_TYPE_SOLARIS) {
+      if (Shell.SOLARIS) {
         String[] result = inpMsg.split("\\s+");
         return Integer.parseInt(result[1]);
       } else {

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Thu Aug 15 00:15:11 2013
@@ -51,6 +51,7 @@ import org.apache.hadoop.util.StringUtil
 public class RawLocalFileSystem extends FileSystem {
   static final URI NAME = URI.create("file:///");
   private Path workingDir;
+  private static final boolean useDeprecatedFileStatus = !Stat.isAvailable();
   
   public RawLocalFileSystem() {
     workingDir = getInitialWorkingDirectory();
@@ -385,8 +386,11 @@ public class RawLocalFileSystem extends 
       throw new FileNotFoundException("File " + f + " does not exist");
     }
     if (localf.isFile()) {
+      if (!useDeprecatedFileStatus) {
+        return new FileStatus[] { getFileStatus(f) };
+      }
       return new FileStatus[] {
-        new RawLocalFileStatus(localf, getDefaultBlockSize(f), this) };
+        new DeprecatedRawLocalFileStatus(localf, getDefaultBlockSize(f), this)};
     }
 
     File[] names = localf.listFiles();
@@ -516,15 +520,22 @@ public class RawLocalFileSystem extends 
   
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
+    return getFileLinkStatusInternal(f, true);
+  }
+
+  @Deprecated
+  private FileStatus deprecatedGetFileStatus(Path f) throws IOException {
     File path = pathToFile(f);
     if (path.exists()) {
-      return new RawLocalFileStatus(pathToFile(f), getDefaultBlockSize(f), this);
+      return new DeprecatedRawLocalFileStatus(pathToFile(f),
+          getDefaultBlockSize(f), this);
     } else {
       throw new FileNotFoundException("File " + f + " does not exist");
     }
   }
 
-  static class RawLocalFileStatus extends FileStatus {
+  @Deprecated
+  static class DeprecatedRawLocalFileStatus extends FileStatus {
     /* We can add extra fields here. It breaks at least CopyFiles.FilePair().
     * We recognize if the information is already loaded by checking if
     * owner.equals("").
@@ -533,7 +544,7 @@ public class RawLocalFileSystem extends 
       return !super.getOwner().isEmpty(); 
     }
     
-    RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) { 
+    DeprecatedRawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
       super(f.length(), f.isDirectory(), 1, defaultBlockSize,
           f.lastModified(), new Path(f.getPath()).makeQualified(fs.getUri(),
             fs.getWorkingDirectory()));
@@ -699,7 +710,7 @@ public class RawLocalFileSystem extends 
    */
   @Override
   public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    FileStatus fi = getFileLinkStatusInternal(f);
+    FileStatus fi = getFileLinkStatusInternal(f, false);
     // getFileLinkStatus is supposed to return a symlink with a
     // qualified path
     if (fi.isSymlink()) {
@@ -710,7 +721,35 @@ public class RawLocalFileSystem extends 
     return fi;
   }
 
-  private FileStatus getFileLinkStatusInternal(final Path f) throws IOException {
+  /**
+   * Public {@link FileStatus} methods delegate to this function, which in turn
+   * either calls the new {@link Stat} based implementation or the deprecated
+   * methods based on platform support.
+   * 
+   * @param f Path to stat
+   * @param dereference whether to dereference the final path component if a
+   *          symlink
+   * @return FileStatus of f
+   * @throws IOException
+   */
+  private FileStatus getFileLinkStatusInternal(final Path f,
+      boolean dereference) throws IOException {
+    if (!useDeprecatedFileStatus) {
+      return getNativeFileLinkStatus(f, dereference);
+    } else if (dereference) {
+      return deprecatedGetFileStatus(f);
+    } else {
+      return deprecatedGetFileLinkStatusInternal(f);
+    }
+  }
+
+  /**
+   * Deprecated. Remains for legacy support. Should be removed when {@link Stat}
+   * gains support for Windows and other operating systems.
+   */
+  @Deprecated
+  private FileStatus deprecatedGetFileLinkStatusInternal(final Path f)
+      throws IOException {
     String target = FileUtil.readLink(new File(f.toString()));
 
     try {
@@ -746,10 +785,31 @@ public class RawLocalFileSystem extends 
       throw e;
     }
   }
+  /**
+   * Calls out to platform's native stat(1) implementation to get file metadata
+   * (permissions, user, group, atime, mtime, etc). This works around the lack
+   * of lstat(2) in Java 6.
+   * 
+   *  Currently, the {@link Stat} class used to do this only supports Linux
+   *  and FreeBSD, so the old {@link #deprecatedGetFileLinkStatusInternal(Path)}
+   *  implementation (deprecated) remains until further OS support is added.
+   *
+   * @param f File to stat
+   * @param dereference whether to dereference symlinks
+   * @return FileStatus of f
+   * @throws IOException
+   */
+  private FileStatus getNativeFileLinkStatus(final Path f,
+      boolean dereference) throws IOException {
+    checkPath(f);
+    Stat stat = new Stat(f, getDefaultBlockSize(f), dereference, this);
+    FileStatus status = stat.getFileStatus();
+    return status;
+  }
 
   @Override
   public Path getLinkTarget(Path f) throws IOException {
-    FileStatus fi = getFileLinkStatusInternal(f);
+    FileStatus fi = getFileLinkStatusInternal(f, false);
     // return an unqualified symlink target
     return fi.getSymlink();
   }
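
For the curious: Java 6 has no lstat(2) binding, so the Stat class this change introduces forks the platform's stat(1) and parses its output. A rough Linux-only sketch of the technique, assuming GNU stat; the actual format string and parsing in Stat.java differ:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class StatSketch {
      // GNU stat: %s=size, %Y=mtime, %a=octal mode, %U=owner, %G=group,
      // %N=quoted name (plus link target for symlinks); -L dereferences.
      static String rawStat(String path, boolean dereference) throws IOException {
        String fmt = "%s,%Y,%a,%U,%G,%N";
        String[] cmd = dereference
            ? new String[] {"stat", "-L", "-c", fmt, path}
            : new String[] {"stat", "-c", fmt, path};
        Process p = new ProcessBuilder(cmd).start();
        BufferedReader r =
            new BufferedReader(new InputStreamReader(p.getInputStream()));
        return r.readLine();
      }

      public static void main(String[] args) throws IOException {
        System.out.println(rawStat("/tmp", false));
      }
    }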

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java Thu Aug 15 00:15:11 2013
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.fs.local;
 
-import java.io.File;
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -28,13 +26,9 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.AbstractFileSystem;
 import org.apache.hadoop.fs.DelegateToFileSystem;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FsConstants;
 import org.apache.hadoop.fs.FsServerDefaults;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RawLocalFileSystem;
-import org.apache.hadoop.fs.permission.FsPermission;
 
 /**
  * The RawLocalFs implementation of AbstractFileSystem.
@@ -72,90 +66,12 @@ public class RawLocalFs extends Delegate
   public FsServerDefaults getServerDefaults() throws IOException {
     return LocalConfigKeys.getServerDefaults();
   }
-  
-  @Override
-  public boolean supportsSymlinks() {
-    return true;
-  }
-
-  @Override
-  public void createSymlink(Path target, Path link, boolean createParent)
-      throws IOException {
-    final String targetScheme = target.toUri().getScheme();
-    if (targetScheme != null && !"file".equals(targetScheme)) {
-      throw new IOException("Unable to create symlink to non-local file "+
-          "system: "+target.toString());
-    }
-
-    if (createParent) {
-      mkdir(link.getParent(), FsPermission.getDirDefault(), true);
-    }
-
-    // NB: Use createSymbolicLink in java.nio.file.Path once available
-    int result = FileUtil.symLink(target.toString(), link.toString());
-    if (result != 0) {
-      throw new IOException("Error " + result + " creating symlink " +
-          link + " to " + target);
-    }
-  }
 
-  /**
-   * Return a FileStatus representing the given path. If the path refers 
-   * to a symlink return a FileStatus representing the link rather than
-   * the object the link refers to.
-   */
-  @Override
-  public FileStatus getFileLinkStatus(final Path f) throws IOException {
-    String target = FileUtil.readLink(new File(f.toString()));
-    try {
-      FileStatus fs = getFileStatus(f);
-      // If f refers to a regular file or directory      
-      if (target.isEmpty()) {
-        return fs;
-      }
-      // Otherwise f refers to a symlink
-      return new FileStatus(fs.getLen(), 
-          false,
-          fs.getReplication(), 
-          fs.getBlockSize(),
-          fs.getModificationTime(),
-          fs.getAccessTime(),
-          fs.getPermission(),
-          fs.getOwner(),
-          fs.getGroup(),
-          new Path(target),
-          f);
-    } catch (FileNotFoundException e) {
-      /* The exists method in the File class returns false for dangling 
-       * links so we can get a FileNotFoundException for links that exist.
-       * It's also possible that we raced with a delete of the link. Use
-       * the readBasicFileAttributes method in java.nio.file.attributes 
-       * when available.
-       */
-      if (!target.isEmpty()) {
-        return new FileStatus(0, false, 0, 0, 0, 0, FsPermission.getDefault(), 
-            "", "", new Path(target), f);        
-      }
-      // f refers to a file or directory that does not exist
-      throw e;
-    }
-  }
-  
-   @Override
-   public boolean isValidName(String src) {
-     // Different local file systems have different validation rules.  Skip
-     // validation here and just let the OS handle it.  This is consistent with
-     // RawLocalFileSystem.
-     return true;
-   }
-  
   @Override
-  public Path getLinkTarget(Path f) throws IOException {
-    /* We should never get here. Valid local links are resolved transparently
-     * by the underlying local file system and accessing a dangling link will 
-     * result in an IOException, not an UnresolvedLinkException, so FileContext
-     * should never call this function.
-     */
-    throw new AssertionError();
+  public boolean isValidName(String src) {
+    // Different local file systems have different validation rules. Skip
+    // validation here and just let the OS handle it. This is consistent with
+    // RawLocalFileSystem.
+    return true;
   }
 }

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Thu Aug 15 00:15:11 2013
@@ -133,7 +133,8 @@ class CopyCommands {  
       "Copy files that match the file pattern <src> to a\n" +
       "destination.  When copying multiple files, the destination\n" +
       "must be a directory. Passing -p preserves access and\n" +
-      "modification times, ownership and the mode.\n";
+      "modification times, ownership and the mode. Passing -f\n" +
+      "overwrites the destination if it already exists.\n";
     
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {
@@ -186,7 +187,8 @@ class CopyCommands {  
       "into fs. Copying fails if the file already\n" +
       "exists, unless the -f flag is given. Passing\n" +
       "-p preserves access and modification times,\n" +
-      "ownership and the mode.\n";
+      "ownership and the mode. Passing -f overwrites\n" +
+      "the destination if it already exists.\n";
 
     @Override
     protected void processOptions(LinkedList<String> args) throws IOException {

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java Thu Aug 15 00:15:11 2013
@@ -35,8 +35,10 @@ import org.apache.avro.Schema;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.AvroFSInput;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathIsDirectoryException;
@@ -259,8 +261,9 @@ class Display extends FsCommand {
       pos = 0;
       buffer = new byte[0];
       GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
+      FileContext fc = FileContext.getFileContext(new Configuration());
       fileReader =
-        DataFileReader.openReader(new File(status.getPath().toUri()), reader);
+        DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
       Schema schema = fileReader.getSchema();
       writer = new GenericDatumWriter<Object>(schema);
       output = new ByteArrayOutputStream();
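
The switch from java.io.File to AvroFSInput is what makes 'hadoop fs -text' work on Avro files in HDFS (HADOOP-9740 above): AvroFSInput adapts a FileContext stream to Avro's SeekableInput. Standalone, the pattern looks roughly like this, with an illustrative path:

    import org.apache.avro.file.DataFileReader;
    import org.apache.avro.file.FileReader;
    import org.apache.avro.generic.GenericDatumReader;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.AvroFSInput;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class AvroCatSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext(new Configuration());
        FileReader<Object> reader = DataFileReader.openReader(
            new AvroFSInput(fc, new Path("hdfs:///tmp/example.avro")),
            new GenericDatumReader<Object>());
        System.out.println(reader.getSchema());
      }
    }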

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java Thu Aug 15 00:15:11 2013
@@ -46,8 +46,8 @@ public enum DefaultMetricsSystem {
   @VisibleForTesting
   volatile boolean miniClusterMode = false;
   
-  final UniqueNames mBeanNames = new UniqueNames();
-  final UniqueNames sourceNames = new UniqueNames();
+  transient final UniqueNames mBeanNames = new UniqueNames();
+  transient final UniqueNames sourceNames = new UniqueNames();
 
   /**
    * Convenience method to initialize the metrics system

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Thu Aug 15 00:15:11 2013
@@ -439,7 +439,6 @@ public class UserGroupInformation {
       } else {
         USER_KERBEROS_OPTIONS.put("doNotPrompt", "true");
         USER_KERBEROS_OPTIONS.put("useTicketCache", "true");
-        USER_KERBEROS_OPTIONS.put("renewTGT", "true");
       }
       String ticketCache = System.getenv("KRB5CCNAME");
       if (ticketCache != null) {
@@ -450,6 +449,7 @@ public class UserGroupInformation {
           USER_KERBEROS_OPTIONS.put("ticketCache", ticketCache);
         }
       }
+      USER_KERBEROS_OPTIONS.put("renewTGT", "true");
       USER_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);
     }
     private static final AppConfigurationEntry USER_KERBEROS_LOGIN =
@@ -465,8 +465,8 @@ public class UserGroupInformation {
         KEYTAB_KERBEROS_OPTIONS.put("doNotPrompt", "true");
         KEYTAB_KERBEROS_OPTIONS.put("useKeyTab", "true");
         KEYTAB_KERBEROS_OPTIONS.put("storeKey", "true");
-        KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
       }
+      KEYTAB_KERBEROS_OPTIONS.put("refreshKrb5Config", "true");
       KEYTAB_KERBEROS_OPTIONS.putAll(BASIC_JAAS_OPTIONS);      
     }
     private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN =
@@ -627,11 +627,17 @@ public class UserGroupInformation {
     }
     try {
       Map<String,String> krbOptions = new HashMap<String,String>();
-      krbOptions.put("doNotPrompt", "true");
-      krbOptions.put("useTicketCache", "true");
-      krbOptions.put("useKeyTab", "false");
+      if (IBM_JAVA) {
+        krbOptions.put("useDefaultCcache", "true");
+        // The first value searched when "useDefaultCcache" is used.
+        System.setProperty("KRB5CCNAME", ticketCache);
+      } else {
+        krbOptions.put("doNotPrompt", "true");
+        krbOptions.put("useTicketCache", "true");
+        krbOptions.put("useKeyTab", "false");
+        krbOptions.put("ticketCache", ticketCache);
+      }
       krbOptions.put("renewTGT", "false");
-      krbOptions.put("ticketCache", ticketCache);
       krbOptions.putAll(HadoopConfiguration.BASIC_JAAS_OPTIONS);
       AppConfigurationEntry ace = new AppConfigurationEntry(
           KerberosUtil.getKrb5LoginModuleName(),

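The UserGroupInformation hunks above do two things: renewTGT (user login) and refreshKrb5Config (keytab login) are hoisted out of the JVM-vendor conditional so they apply on both the IBM and Sun/Oracle paths, and the ticket-cache login path now handles IBM JDKs, whose Krb5LoginModule uses different option names; it reads the default credential cache (selected via KRB5CCNAME) instead of a ticketCache option. A condensed sketch of the resulting logic, where IBM_JAVA and the vendor probe stand in for the real fields in UserGroupInformation:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.login.AppConfigurationEntry;
    import org.apache.hadoop.security.authentication.util.KerberosUtil;

    public class KrbOptionsSketch {
      // Stand-in for the real IBM_JAVA constant from PlatformName.
      static final boolean IBM_JAVA =
          System.getProperty("java.vendor", "").contains("IBM");

      public static AppConfigurationEntry entryFor(String ticketCache) {
        Map<String, String> krbOptions = new HashMap<String, String>();
        if (IBM_JAVA) {
          // IBM's Krb5LoginModule reads the default credential cache;
          // KRB5CCNAME is the first location it consults.
          krbOptions.put("useDefaultCcache", "true");
          System.setProperty("KRB5CCNAME", ticketCache);
        } else {
          krbOptions.put("doNotPrompt", "true");
          krbOptions.put("useTicketCache", "true");
          krbOptions.put("useKeyTab", "false");
          krbOptions.put("ticketCache", ticketCache);
        }
        krbOptions.put("renewTGT", "false"); // now common to both branches
        return new AppConfigurationEntry(
            KerberosUtil.getKrb5LoginModuleName(),
            AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
            krbOptions);
      }
    }
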
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Thu Aug 15 00:15:11 2013
@@ -58,6 +58,45 @@ abstract public class Shell {
   /** Windows CreateProcess synchronization object */
   public static final Object WindowsProcessLaunchLock = new Object();
 
+  // OSType detection
+
+  public enum OSType {
+    OS_TYPE_LINUX,
+    OS_TYPE_WIN,
+    OS_TYPE_SOLARIS,
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD,
+    OS_TYPE_OTHER
+  }
+
+  public static final OSType osType = getOSType();
+
+  static private OSType getOSType() {
+    String osName = System.getProperty("os.name");
+    if (osName.startsWith("Windows")) {
+      return OSType.OS_TYPE_WIN;
+    } else if (osName.contains("SunOS") || osName.contains("Solaris")) {
+      return OSType.OS_TYPE_SOLARIS;
+    } else if (osName.contains("Mac")) {
+      return OSType.OS_TYPE_MAC;
+    } else if (osName.contains("FreeBSD")) {
+      return OSType.OS_TYPE_FREEBSD;
+    } else if (osName.startsWith("Linux")) {
+      return OSType.OS_TYPE_LINUX;
+    } else {
+      // Some other form of Unix
+      return OSType.OS_TYPE_OTHER;
+    }
+  }
+
+  // Helper static vars for each platform
+  public static final boolean WINDOWS = (osType == OSType.OS_TYPE_WIN);
+  public static final boolean SOLARIS = (osType == OSType.OS_TYPE_SOLARIS);
+  public static final boolean MAC     = (osType == OSType.OS_TYPE_MAC);
+  public static final boolean FREEBSD = (osType == OSType.OS_TYPE_FREEBSD);
+  public static final boolean LINUX   = (osType == OSType.OS_TYPE_LINUX);
+  public static final boolean OTHER   = (osType == OSType.OS_TYPE_OTHER);
+
   /** a Unix command to get the current user's groups list */
   public static String[] getGroupsCommand() {
     return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
@@ -282,13 +321,6 @@ abstract public class Shell {
     return exeFile.getCanonicalPath();
   }
 
-  /** Set to true on Windows platforms */
-  public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
-                = System.getProperty("os.name").startsWith("Windows");
-
-  public static final boolean LINUX
-                = System.getProperty("os.name").startsWith("Linux");
-  
   /** a Windows utility to emulate Unix commands */
   public static final String WINUTILS = getWinUtilsPath();
 
@@ -336,6 +368,7 @@ abstract public class Shell {
 
   private long    interval;   // refresh interval in msec
   private long    lastTime;   // last time the command was performed
+  final private boolean redirectErrorStream; // merge stdout and stderr
   private Map<String, String> environment; // env for the command execution
   private File dir;
   private Process process; // sub process used to execute the command
@@ -348,13 +381,18 @@ abstract public class Shell {
     this(0L);
   }
   
+  public Shell(long interval) {
+    this(interval, false);
+  }
+
   /**
    * @param interval the minimum duration to wait before re-executing the 
    *        command.
    */
-  public Shell( long interval ) {
+  public Shell(long interval, boolean redirectErrorStream) {
     this.interval = interval;
     this.lastTime = (interval<0) ? 0 : -interval;
+    this.redirectErrorStream = redirectErrorStream;
   }
   
   /** set the environment for the command 
@@ -393,6 +431,8 @@ abstract public class Shell {
     if (dir != null) {
       builder.directory(this.dir);
     }
+
+    builder.redirectErrorStream(redirectErrorStream);
     
     if (Shell.WINDOWS) {
       synchronized (WindowsProcessLaunchLock) {

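Shell.java replaces the old ad hoc WINDOWS and LINUX string checks with a single OSType computed once from os.name, plus a convenience boolean per platform, and adds an optional redirectErrorStream flag that is passed through to ProcessBuilder so a command's stderr can be merged into its stdout. A hedged usage sketch of the new constants (assumes the patched org.apache.hadoop.util.Shell is on the classpath; the branching shown is illustrative, not from the patch):

    import org.apache.hadoop.util.Shell;

    public class PlatformCheck {
      public static void main(String[] args) {
        // osType is computed once, at class load, from the os.name property.
        System.out.println("Detected OS: " + Shell.osType);
        if (Shell.MAC || Shell.FREEBSD) {
          // BSD userlands often need different command flags than GNU
          // tools; this is the kind of branching these constants enable.
          System.out.println("using BSD-style command variants");
        }
      }
    }
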
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/VersionInfo.java Thu Aug 15 00:15:11 2013
@@ -90,6 +90,10 @@ public class VersionInfo {
       " source checksum " + _getSrcChecksum();
   }
 
+  protected String _getProtocVersion() {
+    return info.getProperty("protocVersion", "Unknown");
+  }
+
   private static VersionInfo COMMON_VERSION_INFO = new VersionInfo("common");
   /**
    * Get the Hadoop version.
@@ -153,12 +157,20 @@ public class VersionInfo {
   public static String getBuildVersion(){
     return COMMON_VERSION_INFO._getBuildVersion();
   }
-  
+
+  /**
+   * Returns the protoc version used for the build.
+   */
+  public static String getProtocVersion(){
+    return COMMON_VERSION_INFO._getProtocVersion();
+  }
+
   public static void main(String[] args) {
     LOG.debug("version: "+ getVersion());
     System.out.println("Hadoop " + getVersion());
     System.out.println("Subversion " + getUrl() + " -r " + getRevision());
     System.out.println("Compiled by " + getUser() + " on " + getDate());
+    System.out.println("Compiled with protoc " + getProtocVersion());
     System.out.println("From source with checksum " + getSrcChecksum());
     System.out.println("This command was run using " + 
         ClassUtil.findContainingJar(VersionInfo.class));

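VersionInfo gains a protoc version string, populated by the build (see the common-version-info.properties change below) and printed by the `hadoop version` command. A sketch of how the value is resolved at runtime, assuming the common component's resource is named common-version-info.properties as in this patch:

    import java.io.InputStream;
    import java.util.Properties;

    public class ProtocVersionProbe {
      public static void main(String[] args) throws Exception {
        Properties info = new Properties();
        InputStream in = ProtocVersionProbe.class.getClassLoader()
            .getResourceAsStream("common-version-info.properties");
        if (in != null) {
          info.load(in);
        }
        // "Unknown" is the default the patch uses when the build did
        // not record a protoc version.
        System.out.println(info.getProperty("protocVersion", "Unknown"));
      }
    }
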
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj Thu Aug 15 00:15:11 2013
@@ -17,7 +17,7 @@
    limitations under the License.
 -->
 
-<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+<Project DefaultTargets="CheckRequireSnappy;Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup Label="ProjectConfigurations">
     <ProjectConfiguration Include="Release|x64">
       <Configuration>Release</Configuration>
@@ -49,6 +49,21 @@
     <IntDir>..\..\..\target\native\$(Configuration)\</IntDir>
     <TargetName>hadoop</TargetName>
   </PropertyGroup>
+  <PropertyGroup>
+    <SnappyLib Condition="Exists('$(CustomSnappyPrefix)\snappy.dll')">$(CustomSnappyPrefix)</SnappyLib>
+    <SnappyLib Condition="Exists('$(CustomSnappyPrefix)\lib\snappy.dll') And '$(SnappyLib)' == ''">$(CustomSnappyPrefix)\lib</SnappyLib>
+    <SnappyLib Condition="Exists('$(CustomSnappyLib)') And '$(SnappyLib)' == ''">$(CustomSnappyLib)</SnappyLib>
+    <SnappyInclude Condition="Exists('$(CustomSnappyPrefix)\snappy.h')">$(CustomSnappyPrefix)</SnappyInclude>
+    <SnappyInclude Condition="Exists('$(CustomSnappyPrefix)\include\snappy.h') And '$(SnappyInclude)' == ''">$(CustomSnappyPrefix)\include</SnappyInclude>
+    <SnappyInclude Condition="Exists('$(CustomSnappyInclude)') And '$(SnappyInclude)' == ''">$(CustomSnappyInclude)</SnappyInclude>
+    <SnappyEnabled Condition="'$(SnappyLib)' != '' And '$(SnappyInclude)' != ''">true</SnappyEnabled>
+    <IncludePath Condition="'$(SnappyEnabled)' == 'true'">$(SnappyInclude);$(IncludePath)</IncludePath>
+  </PropertyGroup>
+  <Target Name="CheckRequireSnappy">
+    <Error
+      Text="Required snappy library could not be found.  SnappyLibrary=$(SnappyLibrary), SnappyInclude=$(SnappyInclude), CustomSnappyLib=$(CustomSnappyLib), CustomSnappyInclude=$(CustomSnappyInclude), CustomSnappyPrefix=$(CustomSnappyPrefix)"
+      Condition="'$(RequireSnappy)' == 'true' And '$(SnappyEnabled)' != 'true'" />
+  </Target>
   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
     <ClCompile>
       <WarningLevel>Level3</WarningLevel>
@@ -71,6 +86,12 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\snappy\SnappyCompressor.c" Condition="'$(SnappyEnabled)' == 'true'">
+      <AdditionalOptions>/D HADOOP_SNAPPY_LIBRARY=L\"snappy.dll\"</AdditionalOptions>
+    </ClCompile>
+    <ClCompile Include="src\org\apache\hadoop\io\compress\snappy\SnappyDecompressor.c" Condition="'$(SnappyEnabled)' == 'true'">
+      <AdditionalOptions>/D HADOOP_SNAPPY_LIBRARY=L\"snappy.dll\"</AdditionalOptions>
+    </ClCompile>
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4hc.c" />
     <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
@@ -79,12 +100,15 @@
     <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
     <ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c" />
     <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c" />
-    <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c" />
+    <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c">
+      <AdditionalOptions Condition="'$(SnappyEnabled)' == 'true'">/D HADOOP_SNAPPY_LIBRARY=L\"snappy.dll\"</AdditionalOptions>
+    </ClCompile>
     <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c" />
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h" />
     <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
+    <ClInclude Include="src\org\apache\hadoop\io\compress\snappy\org_apache_hadoop_io_compress_snappy.h" />
     <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h" />
     <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h" />
     <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h" />

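The MSBuild changes make Snappy support conditional: SnappyLib and SnappyInclude are resolved by probing, in order, CustomSnappyPrefix itself, its lib\ and include\ subdirectories, and finally the explicit CustomSnappyLib / CustomSnappyInclude overrides. When both resolve, SnappyEnabled turns on compilation of the two Snappy JNI files with HADOOP_SNAPPY_LIBRARY pointing at snappy.dll. When RequireSnappy is true and detection fails, the new CheckRequireSnappy target (now first in DefaultTargets) fails the build rather than silently producing a library without Snappy. (Note the error text interpolates $(SnappyLibrary), while the property defined above is $(SnappyLib), so that field will render empty in the message.) An illustrative invocation, assuming snappy.dll and snappy.h live under C:\deps\snappy:

    msbuild native.vcxproj /p:CustomSnappyPrefix=C:\deps\snappy /p:RequireSnappy=true
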
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c Thu Aug 15 00:15:11 2013
@@ -30,6 +30,10 @@
 #include "config.h"
 #endif // UNIX
 
+#ifdef WINDOWS
+#include "winutils.h"
+#endif
+
 #include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
 
 #define JINT_MAX 0x7fffffff
@@ -40,11 +44,18 @@ static jfieldID SnappyCompressor_uncompr
 static jfieldID SnappyCompressor_compressedDirectBuf;
 static jfieldID SnappyCompressor_directBufferSize;
 
+#ifdef UNIX
 static snappy_status (*dlsym_snappy_compress)(const char*, size_t, char*, size_t*);
+#endif
+
+#ifdef WINDOWS
+typedef snappy_status (__cdecl *__dlsym_snappy_compress)(const char*, size_t, char*, size_t*);
+static __dlsym_snappy_compress dlsym_snappy_compress;
+#endif
 
 JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompressor_initIDs
 (JNIEnv *env, jclass clazz){
-
+#ifdef UNIX
   // Load libsnappy.so
   void *libsnappy = dlopen(HADOOP_SNAPPY_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
   if (!libsnappy) {
@@ -53,10 +64,25 @@ JNIEXPORT void JNICALL Java_org_apache_h
     THROW(env, "java/lang/UnsatisfiedLinkError", msg);
     return;
   }
+#endif
+
+#ifdef WINDOWS
+  HMODULE libsnappy = LoadLibrary(HADOOP_SNAPPY_LIBRARY);
+  if (!libsnappy) {
+    THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load snappy.dll");
+    return;
+  }
+#endif
 
   // Locate the requisite symbols from libsnappy.so
+#ifdef UNIX
   dlerror();                                 // Clear any existing error
   LOAD_DYNAMIC_SYMBOL(dlsym_snappy_compress, env, libsnappy, "snappy_compress");
+#endif
+
+#ifdef WINDOWS
+  LOAD_DYNAMIC_SYMBOL(__dlsym_snappy_compress, dlsym_snappy_compress, env, libsnappy, "snappy_compress");
+#endif
 
   SnappyCompressor_clazz = (*env)->GetStaticFieldID(env, clazz, "clazz",
                                                  "Ljava/lang/Class;");
@@ -74,6 +100,9 @@ JNIEXPORT void JNICALL Java_org_apache_h
 
 JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyCompressor_compressBytesDirect
 (JNIEnv *env, jobject thisj){
+  const char* uncompressed_bytes;
+  char* compressed_bytes;
+  snappy_status ret;
   // Get members of SnappyCompressor
   jobject clazz = (*env)->GetStaticObjectField(env, thisj, SnappyCompressor_clazz);
   jobject uncompressed_direct_buf = (*env)->GetObjectField(env, thisj, SnappyCompressor_uncompressedDirectBuf);
@@ -84,7 +113,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   // Get the input direct buffer
   LOCK_CLASS(env, clazz, "SnappyCompressor");
-  const char* uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+  uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
   if (uncompressed_bytes == 0) {
@@ -93,7 +122,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   // Get the output direct buffer
   LOCK_CLASS(env, clazz, "SnappyCompressor");
-  char* compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+  compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "SnappyCompressor");
 
   if (compressed_bytes == 0) {
@@ -102,8 +131,8 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   /* size_t should always be 4 bytes or larger. */
   buf_len = (size_t)compressed_direct_buf_len;
-  snappy_status ret = dlsym_snappy_compress(uncompressed_bytes,
-        uncompressed_direct_buf_len, compressed_bytes, &buf_len);
+  ret = dlsym_snappy_compress(uncompressed_bytes, uncompressed_direct_buf_len,
+        compressed_bytes, &buf_len);
   if (ret != SNAPPY_OK){
     THROW(env, "java/lang/InternalError", "Could not compress data. Buffer length is too small.");
     return 0;
@@ -128,8 +157,18 @@ Java_org_apache_hadoop_io_compress_snapp
       return (*env)->NewStringUTF(env, dl_info.dli_fname);
     }
   }
-#endif
+
   return (*env)->NewStringUTF(env, HADOOP_SNAPPY_LIBRARY);
-}
+#endif
 
+#ifdef WINDOWS
+  LPWSTR filename = NULL;
+  GetLibraryName(dlsym_snappy_compress, &filename);
+  if (filename != NULL) {
+    return (*env)->NewString(env, filename, (jsize) wcslen(filename));
+  } else {
+    return (*env)->NewStringUTF(env, "Unavailable");
+  }
+#endif
+}
 #endif //define HADOOP_SNAPPY_LIBRARY

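The compressor's JNI glue is made portable to Windows: symbol resolution switches from dlopen/dlsym to LoadLibrary plus the Windows variant of LOAD_DYNAMIC_SYMBOL (which presumably wraps GetProcAddress), local declarations move to the top of each block because MSVC compiles these files as C89, and getLibraryName delegates to the new GetLibraryName helper in winutils. From the Java side nothing changes; a hedged round-trip check through the public codec API (requires the native hadoop library and a loadable snappy library at runtime):

    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionOutputStream;
    import org.apache.hadoop.io.compress.SnappyCodec;

    public class SnappyRoundTrip {
      public static void main(String[] args) throws Exception {
        SnappyCodec codec = new SnappyCodec();
        codec.setConf(new Configuration());
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        // Internally drives the compressBytesDirect path patched above.
        CompressionOutputStream out = codec.createOutputStream(bos);
        out.write("hello snappy".getBytes("UTF-8"));
        out.finish();
        out.close();
        System.out.println("compressed to " + bos.size() + " bytes");
      }
    }
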
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c Thu Aug 15 00:15:11 2013
@@ -37,12 +37,20 @@ static jfieldID SnappyDecompressor_compr
 static jfieldID SnappyDecompressor_uncompressedDirectBuf;
 static jfieldID SnappyDecompressor_directBufferSize;
 
+#ifdef UNIX
 static snappy_status (*dlsym_snappy_uncompress)(const char*, size_t, char*, size_t*);
+#endif
+
+#ifdef WINDOWS
+typedef snappy_status (__cdecl *__dlsym_snappy_uncompress)(const char*, size_t, char*, size_t*);
+static __dlsym_snappy_uncompress dlsym_snappy_uncompress;
+#endif
 
 JNIEXPORT void JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompressor_initIDs
 (JNIEnv *env, jclass clazz){
 
   // Load libsnappy.so
+#ifdef UNIX
   void *libsnappy = dlopen(HADOOP_SNAPPY_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
   if (!libsnappy) {
     char* msg = (char*)malloc(1000);
@@ -50,11 +58,27 @@ JNIEXPORT void JNICALL Java_org_apache_h
     THROW(env, "java/lang/UnsatisfiedLinkError", msg);
     return;
   }
+#endif
+
+#ifdef WINDOWS
+  HMODULE libsnappy = LoadLibrary(HADOOP_SNAPPY_LIBRARY);
+  if (!libsnappy) {
+    THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load snappy.dll");
+    return;
+  }
+#endif
 
   // Locate the requisite symbols from libsnappy.so
+#ifdef UNIX
   dlerror();                                 // Clear any existing error
   LOAD_DYNAMIC_SYMBOL(dlsym_snappy_uncompress, env, libsnappy, "snappy_uncompress");
 
+#endif
+
+#ifdef WINDOWS
+  LOAD_DYNAMIC_SYMBOL(__dlsym_snappy_uncompress, dlsym_snappy_uncompress, env, libsnappy, "snappy_uncompress");
+#endif
+
   SnappyDecompressor_clazz = (*env)->GetStaticFieldID(env, clazz, "clazz",
                                                    "Ljava/lang/Class;");
   SnappyDecompressor_compressedDirectBuf = (*env)->GetFieldID(env,clazz,
@@ -71,6 +95,9 @@ JNIEXPORT void JNICALL Java_org_apache_h
 
 JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_snappy_SnappyDecompressor_decompressBytesDirect
 (JNIEnv *env, jobject thisj){
+  const char* compressed_bytes = NULL;
+  char* uncompressed_bytes = NULL;
+  snappy_status ret;
   // Get members of SnappyDecompressor
   jobject clazz = (*env)->GetStaticObjectField(env,thisj, SnappyDecompressor_clazz);
   jobject compressed_direct_buf = (*env)->GetObjectField(env,thisj, SnappyDecompressor_compressedDirectBuf);
@@ -80,7 +107,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   // Get the input direct buffer
   LOCK_CLASS(env, clazz, "SnappyDecompressor");
-  const char* compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+  compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "SnappyDecompressor");
 
   if (compressed_bytes == 0) {
@@ -89,14 +116,15 @@ JNIEXPORT jint JNICALL Java_org_apache_h
 
   // Get the output direct buffer
   LOCK_CLASS(env, clazz, "SnappyDecompressor");
-  char* uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+  uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
   UNLOCK_CLASS(env, clazz, "SnappyDecompressor");
 
   if (uncompressed_bytes == 0) {
     return (jint)0;
   }
 
-  snappy_status ret = dlsym_snappy_uncompress(compressed_bytes, compressed_direct_buf_len, uncompressed_bytes, &uncompressed_direct_buf_len);
+  ret = dlsym_snappy_uncompress(compressed_bytes, compressed_direct_buf_len,
+        uncompressed_bytes, &uncompressed_direct_buf_len);
   if (ret == SNAPPY_BUFFER_TOO_SMALL){
     THROW(env, "java/lang/InternalError", "Could not decompress data. Buffer length is too small.");
   } else if (ret == SNAPPY_INVALID_INPUT){

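The decompressor receives the mirror-image treatment: a __cdecl function-pointer typedef for snappy_uncompress on Windows, LoadLibrary in place of dlopen, and C89-style declarations hoisted to the top of decompressBytesDirect. Continuing the round-trip sketch above, the decompress side reads back through the same codec (codec comes from that sketch and compressed is a hypothetical byte array holding its output; error handling omitted):

    import java.io.ByteArrayInputStream;
    import org.apache.hadoop.io.compress.CompressionInputStream;

    // Drives the decompressBytesDirect path patched above.
    CompressionInputStream in = codec.createInputStream(
        new ByteArrayInputStream(compressed));
    byte[] buf = new byte[4096];
    int n = in.read(buf);
    System.out.println(new String(buf, 0, n, "UTF-8")); // "hello snappy"
    in.close();
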
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/org_apache_hadoop_io_compress_snappy.h Thu Aug 15 00:15:11 2013
@@ -21,7 +21,11 @@
 #define ORG_APACHE_HADOOP_IO_COMPRESS_SNAPPY_SNAPPY_H
 
 #include "org_apache_hadoop.h"
+
+#ifdef UNIX
 #include <dlfcn.h>
+#endif
+
 #include <jni.h>
 #include <snappy-c.h>
 #include <stddef.h>

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c Thu Aug 15 00:15:11 2013
@@ -23,6 +23,10 @@
 #include "config.h"
 #endif // UNIX
 
+#ifdef WINDOWS
+#include "winutils.h"
+#endif
+
 #include <jni.h>
 
 JNIEXPORT jboolean JNICALL Java_org_apache_hadoop_util_NativeCodeLoader_buildSupportsSnappy
@@ -47,32 +51,16 @@ JNIEXPORT jstring JNICALL Java_org_apach
 #endif
 
 #ifdef WINDOWS
-  SIZE_T ret = 0;
-  DWORD size = MAX_PATH;
   LPWSTR filename = NULL;
-  HMODULE mod = NULL;
-  DWORD err = ERROR_SUCCESS;
-
-  MEMORY_BASIC_INFORMATION mbi;
-  ret = VirtualQuery(Java_org_apache_hadoop_util_NativeCodeLoader_getLibraryName,
-    &mbi, sizeof(mbi));
-  if (ret == 0) goto cleanup;
-  mod = mbi.AllocationBase;
-
-  do {
-    filename = (LPWSTR) realloc(filename, size * sizeof(WCHAR));
-    if (filename == NULL) goto cleanup;
-    GetModuleFileName(mod, filename, size);
-    size <<= 1;
-    err = GetLastError();
-  } while (err == ERROR_INSUFFICIENT_BUFFER);
-  
-  if (err != ERROR_SUCCESS) goto cleanup;
-
-  return (*env)->NewString(env, filename, (jsize) wcslen(filename));
-
-cleanup:
-  if (filename != NULL) free(filename);
-  return (*env)->NewStringUTF(env, "Unavailable");
+  GetLibraryName(Java_org_apache_hadoop_util_NativeCodeLoader_getLibraryName,
+    &filename);
+  if (filename != NULL)
+  {
+    return (*env)->NewString(env, filename, (jsize) wcslen(filename));
+  }
+  else
+  {
+    return (*env)->NewStringUTF(env, "Unavailable");
+  }
 #endif
 }

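NativeCodeLoader's Windows getLibraryName implementation collapses from an inline VirtualQuery/GetModuleFileName loop to a call into the shared GetLibraryName helper (added to libwinutils below), so the same logic now serves both this file and the Snappy glue. A quick way to observe the result from Java, hedged on the native library actually being present:

    import org.apache.hadoop.util.NativeCodeLoader;

    public class NativeProbe {
      public static void main(String[] args) {
        System.out.println("native loaded: "
            + NativeCodeLoader.isNativeCodeLoaded());
        if (NativeCodeLoader.isNativeCodeLoaded()) {
          // Both methods are JNI calls into the code patched above.
          System.out.println("library: " + NativeCodeLoader.getLibraryName());
          System.out.println("snappy built in: "
              + NativeCodeLoader.buildSupportsSnappy());
        }
      }
    }
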
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/resources/common-version-info.properties Thu Aug 15 00:15:11 2013
@@ -23,3 +23,4 @@ user=${user.name}
 date=${version-info.build.time}
 url=${version-info.scm.uri}
 srcChecksum=${version-info.source.md5}
+protocVersion=${protobuf.version}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/include/winutils.h Thu Aug 15 00:15:11 2013
@@ -153,4 +153,6 @@ DWORD ChangeFileModeByMask(__in LPCWSTR 
 DWORD GetLocalGroupsForUser(__in LPCWSTR user,
   __out LPLOCALGROUP_USERS_INFO_0 *groups, __out LPDWORD entries);
 
-BOOL EnablePrivilege(__in LPCWSTR privilegeName);
\ No newline at end of file
+BOOL EnablePrivilege(__in LPCWSTR privilegeName);
+
+void GetLibraryName(__in LPCVOID lpAddress, __out LPWSTR *filename);

Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c Thu Aug 15 00:15:11 2013
@@ -1709,3 +1709,51 @@ void ReportErrorCode(LPCWSTR func, DWORD
   }
   if (msg != NULL) LocalFree(msg);
 }
+
+//----------------------------------------------------------------------------
+// Function: GetLibraryName
+//
+// Description:
+//  Given an address, get the file name of the library from which it was loaded.
+//
+// Returns:
+//  None
+//
+// Notes:
+// - The function allocates heap memory and points the filename out parameter to
+//   the newly allocated memory, which will contain the name of the file.
+//
+// - If there is any failure, then the function frees the heap memory it
+//   allocated and sets the filename out parameter to NULL.
+//
+void GetLibraryName(LPCVOID lpAddress, LPWSTR *filename)
+{
+  SIZE_T ret = 0;
+  DWORD size = MAX_PATH;
+  HMODULE mod = NULL;
+  DWORD err = ERROR_SUCCESS;
+
+  MEMORY_BASIC_INFORMATION mbi;
+  ret = VirtualQuery(lpAddress, &mbi, sizeof(mbi));
+  if (ret == 0) goto cleanup;
+  mod = mbi.AllocationBase;
+
+  do {
+    *filename = (LPWSTR) realloc(*filename, size * sizeof(WCHAR));
+    if (*filename == NULL) goto cleanup;
+    GetModuleFileName(mod, *filename, size);
+    size <<= 1;
+    err = GetLastError();
+  } while (err == ERROR_INSUFFICIENT_BUFFER);
+
+  if (err != ERROR_SUCCESS) goto cleanup;
+
+  return;
+
+cleanup:
+  if (*filename != NULL)
+  {
+    free(*filename);
+    *filename = NULL;
+  }
+}

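GetLibraryName is the extracted helper: VirtualQuery maps the supplied code address back to its module's allocation base, then a grow-and-retry loop doubles the buffer until GetModuleFileName stops reporting ERROR_INSUFFICIENT_BUFFER. On any failure the helper frees whatever it allocated and nulls the out parameter, so callers only need to check for NULL; per the comment block, the caller owns the returned heap buffer.
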
Modified: hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-common-project/hadoop-common/src/site/apt/CLIMiniCluster.apt.vm Thu Aug 15 00:15:11 2013
@@ -42,7 +42,7 @@ Hadoop MapReduce Next Generation - CLI M
 $ mvn clean install -DskipTests
 $ mvn package -Pdist -Dtar -DskipTests -Dmaven.javadoc.skip
 +---+
-  <<NOTE:>> You will need protoc installed of version 2.4.1 or greater.
+  <<NOTE:>> You will need protoc 2.5.0 installed.
 
   The tarball should be available in <<<hadoop-dist/target/>>> directory.