Posted to common-commits@hadoop.apache.org by su...@apache.org on 2012/11/08 20:09:56 UTC

svn commit: r1407217 [1/2] - in /hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common: ./ src/ src/main/docs/ src/main/java/ src/main/java/org/apache/hadoop/conf/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoo...

Author: suresh
Date: Thu Nov  8 19:09:46 2012
New Revision: 1407217

URL: http://svn.apache.org/viewvc?rev=1407217&view=rev
Log:
Merging trunk to branch-trunk-win branch

Added:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
      - copied unchanged from r1407201, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringInterner.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithExternalKdc.java
      - copied unchanged from r1407201, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithExternalKdc.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java
      - copied unchanged from r1407201, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestStringInterner.java
Modified:
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFsTestSetup.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/SecurityUtilTestHelper.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestSecurityUtil.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUGIWithSecurityOn.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestUserGroupInformation.java
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/proto/test.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
    hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/resources/testConf.xml

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt Thu Nov  8 19:09:46 2012
@@ -129,6 +129,9 @@ Trunk (Unreleased)
     HADOOP-8776. Provide an option in test-patch that can enable/disable
     compiling native code. (Chris Nauroth via suresh)
 
+    HADOOP-9004. Allow security unit tests to use external KDC. (Stephen Chu
+    via suresh)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -269,10 +272,13 @@ Trunk (Unreleased)
     HADOOP-8918. test-patch.sh is parsing modified files wrong.
     (Raja Aluri via suresh)
 
+    HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
+    HADOOP-8589 ViewFs tests fail when tests and home dirs are nested (sanjay Radia)
 Release 2.0.3-alpha - Unreleased 
 
   INCOMPATIBLE CHANGES
@@ -330,6 +336,22 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-8925. Remove the packaging. (eli)
 
+    HADOOP-8985. Add namespace declarations in .proto files for languages 
+    other than java. (Binglin Chan via suresh)
+
+    HADOOP-9009. Add SecurityUtil methods to get/set authentication method
+    (daryn via bobby)
+
+    HADOOP-9010. Map UGI authenticationMethod to RPC authMethod (daryn via
+    bobby)
+
+    HADOOP-9013. UGI should not hardcode loginUser's authenticationType (daryn
+    via bobby)
+
+    HADOOP-9014. Standardize creation of SaslRpcClients (daryn via bobby)
+
+    HADOOP-9015. Standardize creation of SaslRpcServers (daryn via bobby)
+
   OPTIMIZATIONS
 
     HADOOP-8866. SampleQuantiles#query is O(N^2) instead of O(N). (Andrew Wang
@@ -375,7 +397,7 @@ Release 2.0.3-alpha - Unreleased 
     (rkanter via tucu)
 
     HADOOP-8900. BuiltInGzipDecompressor throws IOException - stored gzip size
-    doesn't match decompressed size. (Slavik Krassovsky via suresh)
+    doesn't match decompressed size. (Andy Isaacson via suresh)
 
     HADOOP-8948. TestFileUtil.testGetDU fails on Windows due to incorrect
     assumption of line separator. (Chris Nauroth via suresh)
@@ -383,6 +405,11 @@ Release 2.0.3-alpha - Unreleased 
     HADOOP-8951. RunJar to fail with user-comprehensible error 
     message if jar missing. (stevel via suresh)
 
+    HADOOP-8713. TestRPCCompatibility fails intermittently with JDK7
+    (Trevor Robinson via tgraves)
+
+    HADOOP-9012. IPC Client sends wrong connection context (daryn via bobby)
+
 Release 2.0.2-alpha - 2012-09-07 
 
   INCOMPATIBLE CHANGES
@@ -1094,6 +1121,14 @@ Release 0.23.5 - UNRELEASED
     HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via
     jlowe)
 
+    HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via
+    bobby)
+
+    HADOOP-8962. RawLocalFileSystem.listStatus fails when a child filename
+    contains a colon (jlowe via bobby)
+
+    HADOOP-8986. Server$Call object is never released after it is sent (bobby)
+
 Release 0.23.4 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1401063-1407201

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/CMakeLists.txt Thu Nov  8 19:09:46 2012
@@ -67,6 +67,9 @@ macro(set_find_shared_library_version LV
     IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
         # Mac OS uses .dylib
         SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
+    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
+        # FreeBSD has always .so installed.
+        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
     ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
         # Windows doesn't support finding shared libraries by version.
     ELSE()
@@ -95,8 +98,10 @@ GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRA
 
 INCLUDE(CheckFunctionExists)
 INCLUDE(CheckCSourceCompiles)
+INCLUDE(CheckLibraryExists)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
 
 SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
 set_find_shared_library_version("1")
@@ -159,6 +164,9 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+if (NEED_LINK_DL)
+   set(LIB_DL dl)
+endif (NEED_LINK_DL)
 
 IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
     #
@@ -171,7 +179,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux"
 ENDIF()
 
 target_link_dual_libraries(hadoop
-    dl
+    ${LIB_DL}
     ${JAVA_JVM_LIBRARY}
 )
 SET(LIBHADOOP_VERSION "1.0.0")

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/docs:r1401063-1407201

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1401063-1407201

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java Thu Nov  8 19:09:46 2012
@@ -75,6 +75,7 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonGenerator;
@@ -2002,13 +2003,16 @@ public class Configuration implements It
             continue;
           Element field = (Element)fieldNode;
           if ("name".equals(field.getTagName()) && field.hasChildNodes())
-            attr = ((Text)field.getFirstChild()).getData().trim();
+            attr = StringInterner.weakIntern(
+                ((Text)field.getFirstChild()).getData().trim());
           if ("value".equals(field.getTagName()) && field.hasChildNodes())
-            value = ((Text)field.getFirstChild()).getData();
+            value = StringInterner.weakIntern(
+                ((Text)field.getFirstChild()).getData());
           if ("final".equals(field.getTagName()) && field.hasChildNodes())
             finalParameter = "true".equals(((Text)field.getFirstChild()).getData());
           if ("source".equals(field.getTagName()) && field.hasChildNodes())
-            source.add(((Text)field.getFirstChild()).getData());
+            source.add(StringInterner.weakIntern(
+                ((Text)field.getFirstChild()).getData()));
         }
         source.add(name);
         

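The StringInterner.weakIntern calls introduced above deduplicate the strings read out of configuration XML. A minimal sketch of such a weak interner, assuming Guava's Interners utility (the WeakStringInterner class below is illustrative; the StringInterner added by this commit may differ in detail):

    import com.google.common.collect.Interner;
    import com.google.common.collect.Interners;

    public final class WeakStringInterner {
      // Weakly referenced pool: interned strings remain eligible for GC once
      // no caller holds them, unlike the permanent String.intern() pool.
      private static final Interner<String> POOL = Interners.newWeakInterner();

      public static String weakIntern(String sample) {
        return (sample == null) ? null : POOL.intern(sample);
      }
    }
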
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegateToFileSystem.java Thu Nov  8 19:09:46 2012
@@ -125,6 +125,11 @@ public abstract class DelegateToFileSyst
   public FsServerDefaults getServerDefaults() throws IOException {
     return fsImpl.getServerDefaults();
   }
+  
+  @Override
+  public Path getHomeDirectory() {
+    return fsImpl.getHomeDirectory();
+  }
 
   @Override
   public int getUriDefaultPort() {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java Thu Nov  8 19:09:46 2012
@@ -45,7 +45,8 @@ public class HardLink { 
     OS_TYPE_UNIX,
     OS_TYPE_WIN,
     OS_TYPE_SOLARIS,
-    OS_TYPE_MAC
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD
   }
   
   public static OSType osType;
@@ -65,7 +66,7 @@ public class HardLink { 
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC) {
+      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
       } else if (osType == OSType.OS_TYPE_SOLARIS) {
@@ -91,6 +92,9 @@ public class HardLink { 
     else if (osName.contains("Mac")) {
        return OSType.OS_TYPE_MAC;
     }
+    else if (osName.contains("FreeBSD")) {
+       return OSType.OS_TYPE_FREEBSD;
+    }
     else {
       return OSType.OS_TYPE_UNIX;
     }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Thu Nov  8 19:09:46 2012
@@ -349,7 +349,7 @@ public class RawLocalFileSystem extends 
         new RawLocalFileStatus(localf, getDefaultBlockSize(f), this) };
     }
 
-    String[] names = localf.list();
+    File[] names = localf.listFiles();
     if (names == null) {
       return null;
     }
@@ -357,7 +357,7 @@ public class RawLocalFileSystem extends 
     int j = 0;
     for (int i = 0; i < names.length; i++) {
       try {
-        results[j] = getFileStatus(new Path(f, names[i]));
+        results[j] = getFileStatus(new Path(names[i].getAbsolutePath()));
         j++;
       } catch (FileNotFoundException e) {
         // ignore the files not found since the dir list may have have changed

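The listStatus change above (HADOOP-8962) stops rebuilding child paths from bare file names, because a relative name containing a colon parses as scheme:path. A hedged illustration of the failure mode, with throwaway names:

    import java.io.File;
    import org.apache.hadoop.fs.Path;

    // The two-argument constructor treats "a" as a URI scheme and fails
    // ("Relative path in absolute URI"); an absolute string parses cleanly
    // because the slash precedes the colon.
    Path broken = new Path(new Path("/tmp"), "a:b");                   // throws
    Path works  = new Path(new File("/tmp", "a:b").getAbsolutePath()); // /tmp/a:b
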
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Test.java Thu Nov  8 19:09:46 2012
@@ -37,16 +37,21 @@ class Test extends FsCommand {  
   }
 
   public static final String NAME = "test";
-  public static final String USAGE = "-[ezd] <path>";
+  public static final String USAGE = "-[defsz] <path>";
   public static final String DESCRIPTION =
-    "If file exists, has zero length, is a directory\n" +
-    "then return 0, else return 1.";
+    "Answer various questions about <path>, with result via exit status.\n" +
+    "  -d  return 0 if <path> is a directory.\n" +
+    "  -e  return 0 if <path> exists.\n" +
+    "  -f  return 0 if <path> is a file.\n" +
+    "  -s  return 0 if file <path> is greater than zero bytes in size.\n" +
+    "  -z  return 0 if file <path> is zero bytes in size.\n" +
+    "else, return 1.";
 
   private char flag;
   
   @Override
   protected void processOptions(LinkedList<String> args) {
-    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "z");
+    CommandFormat cf = new CommandFormat(1, 1, "e", "d", "f", "s", "z");
     cf.parse(args);
     
     String[] opts = cf.getOpts().toArray(new String[0]);
@@ -71,6 +76,12 @@ class Test extends FsCommand {  
       case 'd':
         test = item.stat.isDirectory();
         break;
+      case 'f':
+        test = item.stat.isFile();
+        break;
+      case 's':
+        test = (item.stat.getLen() > 0);
+        break;
       case 'z':
         test = (item.stat.getLen() == 0);
         break;

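The new -d/-e/-f/-s/-z flags map directly onto FileStatus queries. An equivalent programmatic check through the public FileSystem API (paths are illustrative, not part of the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    FileSystem fs = FileSystem.get(new Configuration());
    Path p = new Path("/tmp/sample");
    boolean exists = fs.exists(p);        // -e
    FileStatus st = fs.getFileStatus(p);  // throws FileNotFoundException if absent
    boolean isDir   = st.isDirectory();   // -d
    boolean isFile  = st.isFile();        // -f
    boolean nonZero = st.getLen() > 0;    // -s
    boolean isZero  = st.getLen() == 0;   // -z
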
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ChRootedFileSystem.java Thu Nov  8 19:09:46 2012
@@ -153,12 +153,6 @@ class ChRootedFileSystem extends FilterF
     return makeQualified(
         new Path(chRootPathPartString + f.toUri().toString()));
   }
-  
-  @Override
-  public Path getHomeDirectory() {
-    return  new Path("/user/"+System.getProperty("user.name")).makeQualified(
-          getUri(), null);
-  }
 
   @Override
   public Path getWorkingDirectory() {

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java Thu Nov  8 19:09:46 2012
@@ -256,8 +256,9 @@ public class ViewFileSystem extends File
       if (base == null) {
         base = "/user";
       }
-      homeDir = 
-        this.makeQualified(new Path(base + "/" + ugi.getShortUserName()));
+      homeDir = (base.equals("/") ? 
+          this.makeQualified(new Path(base + ugi.getShortUserName())):
+          this.makeQualified(new Path(base + "/" + ugi.getShortUserName())));
     }
     return homeDir;
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java Thu Nov  8 19:09:46 2012
@@ -248,8 +248,9 @@ public class ViewFs extends AbstractFile
       if (base == null) {
         base = "/user";
       }
-      homeDir = 
-        this.makeQualified(new Path(base + "/" + ugi.getShortUserName()));
+      homeDir = (base.equals("/") ? 
+        this.makeQualified(new Path(base + ugi.getShortUserName())):
+        this.makeQualified(new Path(base + "/" + ugi.getShortUserName())));
     }
     return homeDir;
   }

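The two getHomeDirectory changes above (part of HADOOP-8589) guard against a doubled slash when the configured home-directory base is the root. The corner case in isolation (names illustrative):

    String base = "/";          // configured home dir base
    String user = "alice";
    String naive = base + "/" + user;                    // "//alice" -- wrong
    String fixed = base.equals("/") ? base + user
                                    : base + "/" + user; // "/alice"
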
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Thu Nov  8 19:09:46 2012
@@ -69,6 +69,7 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -222,7 +223,6 @@ public class Client {
   private class Connection extends Thread {
     private InetSocketAddress server;             // server ip:port
     private String serverPrincipal;  // server's krb5 principal name
-    private IpcConnectionContextProto connectionContext;   // connection context
     private final ConnectionId remoteId;                // connection id
     private AuthMethod authMethod; // authentication method
     private Token<? extends TokenIdentifier> token;
@@ -295,16 +295,14 @@ public class Client {
       }
       
       if (token != null) {
-        authMethod = AuthMethod.DIGEST;
+        authMethod = AuthenticationMethod.TOKEN.getAuthMethod();
       } else if (UserGroupInformation.isSecurityEnabled()) {
+        // eventually just use the ticket's authMethod
         authMethod = AuthMethod.KERBEROS;
       } else {
         authMethod = AuthMethod.SIMPLE;
       }
       
-      connectionContext = ProtoUtil.makeIpcConnectionContext(
-          RPC.getProtocolName(protocol), ticket, authMethod);
-      
       if (LOG.isDebugEnabled())
         LOG.debug("Use " + authMethod + " authentication for protocol "
             + protocol.getSimpleName());
@@ -605,11 +603,6 @@ public class Client {
             } else {
               // fall back to simple auth because server told us so.
               authMethod = AuthMethod.SIMPLE;
-              // remake the connectionContext             
-              connectionContext = ProtoUtil.makeIpcConnectionContext(
-                  connectionContext.getProtocol(), 
-                  ProtoUtil.getUgi(connectionContext.getUserInfo()),
-                  authMethod);
             }
           }
         
@@ -620,7 +613,7 @@ public class Client {
             this.in = new DataInputStream(new BufferedInputStream(inStream));
           }
           this.out = new DataOutputStream(new BufferedOutputStream(outStream));
-          writeConnectionContext();
+          writeConnectionContext(remoteId, authMethod);
 
           // update last activity time
           touch();
@@ -742,10 +735,15 @@ public class Client {
     /* Write the connection context header for each connection
      * Out is not synchronized because only the first thread does this.
      */
-    private void writeConnectionContext() throws IOException {
+    private void writeConnectionContext(ConnectionId remoteId,
+                                        AuthMethod authMethod)
+                                            throws IOException {
       // Write out the ConnectionHeader
       DataOutputBuffer buf = new DataOutputBuffer();
-      connectionContext.writeTo(buf);
+      ProtoUtil.makeIpcConnectionContext(
+          RPC.getProtocolName(remoteId.getProtocol()),
+          remoteId.getTicket(),
+          authMethod).writeTo(buf);
       
       // Write out the payload length
       int bufLen = buf.getLength();

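The Client change above (HADOOP-9012) removes the cached connectionContext: the context is now derived from remoteId and the final authMethod at the moment it is written, so a mid-handshake fallback to SIMPLE can no longer send a stale copy. The essence of the fix, restated (names as in the diff):

    // before: built at connection setup and manually remade on SASL fallback,
    // which was easy to miss and produced a wrong context
    // after: derived from current state exactly when it goes on the wire
    ProtoUtil.makeIpcConnectionContext(
        RPC.getProtocolName(remoteId.getProtocol()),
        remoteId.getTicket(),
        authMethod).writeTo(buf);
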
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java Thu Nov  8 19:09:46 2012
@@ -57,6 +57,7 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
 
+import javax.security.auth.callback.CallbackHandler;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
@@ -87,6 +88,7 @@ import org.apache.hadoop.security.SaslRp
 import org.apache.hadoop.security.SaslRpcServer.SaslGssCallbackHandler;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
@@ -974,6 +976,8 @@ public abstract class Server {
             return true;
           }
           if (!call.rpcResponse.hasRemaining()) {
+            //Clear out the response buffer so it can be collected
+            call.rpcResponse = null;
             call.connection.decRpcCount();
             if (numElements == 1) {    // last call fully processes.
               done = true;             // no more data for this channel.
@@ -1076,7 +1080,6 @@ public abstract class Server {
     
     IpcConnectionContextProto connectionContext;
     String protocolName;
-    boolean useSasl;
     SaslServer saslServer;
     private AuthMethod authMethod;
     private boolean saslContextEstablished;
@@ -1192,49 +1195,6 @@ public abstract class Server {
       if (!saslContextEstablished) {
         byte[] replyToken = null;
         try {
-          if (saslServer == null) {
-            switch (authMethod) {
-            case DIGEST:
-              if (secretManager == null) {
-                throw new AccessControlException(
-                    "Server is not configured to do DIGEST authentication.");
-              }
-              secretManager.checkAvailableForRead();
-              saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
-                  .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
-                  SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
-                      secretManager, this));
-              break;
-            default:
-              UserGroupInformation current = UserGroupInformation
-                  .getCurrentUser();
-              String fullName = current.getUserName();
-              if (LOG.isDebugEnabled())
-                LOG.debug("Kerberos principal name is " + fullName);
-              final String names[] = SaslRpcServer.splitKerberosName(fullName);
-              if (names.length != 3) {
-                throw new AccessControlException(
-                    "Kerberos principal name does NOT have the expected "
-                        + "hostname part: " + fullName);
-              }
-              current.doAs(new PrivilegedExceptionAction<Object>() {
-                @Override
-                public Object run() throws SaslException {
-                  saslServer = Sasl.createSaslServer(AuthMethod.KERBEROS
-                      .getMechanismName(), names[0], names[1],
-                      SaslRpcServer.SASL_PROPS, new SaslGssCallbackHandler());
-                  return null;
-                }
-              });
-            }
-            if (saslServer == null)
-              throw new AccessControlException(
-                  "Unable to find SASL server implementation for "
-                      + authMethod.getMechanismName());
-            if (LOG.isDebugEnabled())
-              LOG.debug("Created SASL server with mechanism = "
-                  + authMethod.getMechanismName());
-          }
           if (LOG.isDebugEnabled())
             LOG.debug("Have read input token of size " + saslToken.length
                 + " for processing by saslServer.evaluateResponse()");
@@ -1373,38 +1333,27 @@ public abstract class Server {
           dataLengthBuffer.clear();
           if (authMethod == null) {
             throw new IOException("Unable to read authentication method");
-          }          
+          }
+          boolean useSaslServer = isSecurityEnabled;
           final boolean clientUsingSasl;
           switch (authMethod) {
             case SIMPLE: { // no sasl for simple
-              if (isSecurityEnabled) {
-                AccessControlException ae = new AccessControlException("Authorization ("
-                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
-                    + ") is enabled but authentication ("
-                    + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
-                    + ") is configured as simple. Please configure another method "
-                    + "like kerberos or digest.");
-                setupResponse(authFailedResponse, authFailedCall, RpcStatusProto.FATAL,
-                    null, ae.getClass().getName(), ae.getMessage());
-                responder.doRespond(authFailedCall);
-                throw ae;
-              }
               clientUsingSasl = false;
-              useSasl = false; 
               break;
             }
-            case DIGEST: {
+            case DIGEST: { // always allow tokens if there's a secret manager
+              useSaslServer |= (secretManager != null);
               clientUsingSasl = true;
-              useSasl = (secretManager != null);
               break;
             }
             default: {
               clientUsingSasl = true;
-              useSasl = isSecurityEnabled; 
               break;
             }
-          }          
-          if (clientUsingSasl && !useSasl) {
+          }
+          if (useSaslServer) {
+            saslServer = createSaslServer(authMethod);
+          } else if (clientUsingSasl) { // security is off
             doSaslReply(SaslStatus.SUCCESS, new IntWritable(
                 SaslRpcServer.SWITCH_TO_SIMPLE_AUTH), null, null);
             authMethod = AuthMethod.SIMPLE;
@@ -1446,7 +1395,7 @@ public abstract class Server {
             continue;
           }
           boolean isHeaderRead = connectionContextRead;
-          if (useSasl) {
+          if (saslServer != null) {
             saslReadAndProcess(data.array());
           } else {
             processOneRpc(data.array());
@@ -1460,6 +1409,84 @@ public abstract class Server {
       }
     }
 
+    private SaslServer createSaslServer(AuthMethod authMethod)
+        throws IOException {
+      try {
+        return createSaslServerInternal(authMethod);
+      } catch (IOException ioe) {
+        final String ioeClass = ioe.getClass().getName();
+        final String ioeMessage  = ioe.getLocalizedMessage();
+        if (authMethod == AuthMethod.SIMPLE) {
+          setupResponse(authFailedResponse, authFailedCall,
+              RpcStatusProto.FATAL, null, ioeClass, ioeMessage);
+          responder.doRespond(authFailedCall);
+        } else {
+          doSaslReply(SaslStatus.ERROR, null, ioeClass, ioeMessage);
+        }
+        throw ioe;
+      }
+    }
+
+    private SaslServer createSaslServerInternal(AuthMethod authMethod)
+        throws IOException {
+      SaslServer saslServer = null;
+      String hostname = null;
+      String saslProtocol = null;
+      CallbackHandler saslCallback = null;
+      
+      switch (authMethod) {
+        case SIMPLE: {
+          throw new AccessControlException("Authorization ("
+              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION
+              + ") is enabled but authentication ("
+              + CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION
+              + ") is configured as simple. Please configure another method "
+              + "like kerberos or digest.");
+        }
+        case DIGEST: {
+          if (secretManager == null) {
+            throw new AccessControlException(
+                "Server is not configured to do DIGEST authentication.");
+          }
+          secretManager.checkAvailableForRead();
+          hostname = SaslRpcServer.SASL_DEFAULT_REALM;
+          saslCallback = new SaslDigestCallbackHandler(secretManager, this);
+          break;
+        }
+        case KERBEROS: {
+          String fullName = UserGroupInformation.getCurrentUser().getUserName();
+          if (LOG.isDebugEnabled())
+            LOG.debug("Kerberos principal name is " + fullName);
+          KerberosName krbName = new KerberosName(fullName);
+          hostname = krbName.getHostName();
+          if (hostname == null) {
+            throw new AccessControlException(
+                "Kerberos principal name does NOT have the expected "
+                    + "hostname part: " + fullName);
+          }
+          saslProtocol = krbName.getServiceName();
+          saslCallback = new SaslGssCallbackHandler();
+          break;
+        }
+        default:
+          throw new AccessControlException(
+              "Server does not support SASL " + authMethod);
+      }
+      
+      String mechanism = authMethod.getMechanismName();
+      saslServer = Sasl.createSaslServer(
+          mechanism, saslProtocol, hostname,
+          SaslRpcServer.SASL_PROPS, saslCallback);
+      if (saslServer == null) {
+        throw new AccessControlException(
+            "Unable to find SASL server implementation for " + mechanism);
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Created SASL server with mechanism = " + mechanism);
+      }
+      return saslServer;
+    }
+    
     /**
      * Try to set up the response to indicate that the client version
      * is incompatible with the server. This can contain special-case
@@ -1521,14 +1548,14 @@ public abstract class Server {
           .getProtocol() : null;
 
       UserGroupInformation protocolUser = ProtoUtil.getUgi(connectionContext);
-      if (!useSasl) {
+      if (saslServer == null) {
         user = protocolUser;
         if (user != null) {
-          user.setAuthenticationMethod(AuthMethod.SIMPLE.authenticationMethod);
+          user.setAuthenticationMethod(AuthMethod.SIMPLE);
         }
       } else {
         // user is authenticated
-        user.setAuthenticationMethod(authMethod.authenticationMethod);
+        user.setAuthenticationMethod(authMethod);
         //Now we check if this is a proxy user case. If the protocol user is
         //different from the 'user', it is a proxy user scenario. However, 
         //this is not allowed if user authenticated with DIGEST.
@@ -1997,7 +2024,7 @@ public abstract class Server {
   
   private void wrapWithSasl(ByteArrayOutputStream response, Call call)
       throws IOException {
-    if (call.connection.useSasl) {
+    if (call.connection.saslServer != null) {
       byte[] token = response.toByteArray();
       // synchronization may be needed since there can be multiple Handler
       // threads using saslServer to wrap responses.

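The refactored createSaslServerInternal above replaces manual splitKerberosName handling with KerberosName. A hedged example of the parsing it relies on (the principal value is illustrative):

    import org.apache.hadoop.security.authentication.util.KerberosName;

    KerberosName kn = new KerberosName("nn/host.example.com@EXAMPLE.COM");
    String service = kn.getServiceName();  // "nn" -> used as the SASL protocol
    String host    = kn.getHostName();     // "host.example.com" -> server name
    // For a principal without a hostname part, e.g. "alice@EXAMPLE.COM",
    // getHostName() returns null, which the server rejects for KERBEROS.
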
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java Thu Nov  8 19:09:46 2012
@@ -25,6 +25,7 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.Map;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -45,6 +46,7 @@ import org.apache.hadoop.io.WritableUtil
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SaslRpcServer.SaslStatus;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
@@ -69,40 +71,48 @@ public class SaslRpcClient {
   public SaslRpcClient(AuthMethod method,
       Token<? extends TokenIdentifier> token, String serverPrincipal)
       throws IOException {
+    String saslUser = null;
+    String saslProtocol = null;
+    String saslServerName = null;
+    Map<String, String> saslProperties = SaslRpcServer.SASL_PROPS;
+    CallbackHandler saslCallback = null;
+    
     switch (method) {
-    case DIGEST:
-      if (LOG.isDebugEnabled())
-        LOG.debug("Creating SASL " + AuthMethod.DIGEST.getMechanismName()
-            + " client to authenticate to service at " + token.getService());
-      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.DIGEST
-          .getMechanismName() }, null, null, SaslRpcServer.SASL_DEFAULT_REALM,
-          SaslRpcServer.SASL_PROPS, new SaslClientCallbackHandler(token));
-      break;
-    case KERBEROS:
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Creating SASL " + AuthMethod.KERBEROS.getMechanismName()
-            + " client. Server's Kerberos principal name is "
-            + serverPrincipal);
-      }
-      if (serverPrincipal == null || serverPrincipal.length() == 0) {
-        throw new IOException(
-            "Failed to specify server's Kerberos principal name");
-      }
-      String names[] = SaslRpcServer.splitKerberosName(serverPrincipal);
-      if (names.length != 3) {
-        throw new IOException(
-          "Kerberos principal name does NOT have the expected hostname part: "
-                + serverPrincipal);
-      }
-      saslClient = Sasl.createSaslClient(new String[] { AuthMethod.KERBEROS
-          .getMechanismName() }, null, names[0], names[1],
-          SaslRpcServer.SASL_PROPS, null);
-      break;
-    default:
-      throw new IOException("Unknown authentication method " + method);
+      case DIGEST: {
+        saslServerName = SaslRpcServer.SASL_DEFAULT_REALM;
+        saslCallback = new SaslClientCallbackHandler(token);
+        break;
+      }
+      case KERBEROS: {
+        if (serverPrincipal == null || serverPrincipal.isEmpty()) {
+          throw new IOException(
+              "Failed to specify server's Kerberos principal name");
+        }
+        KerberosName name = new KerberosName(serverPrincipal);
+        saslProtocol = name.getServiceName();
+        saslServerName = name.getHostName();
+        if (saslServerName == null) {
+          throw new IOException(
+              "Kerberos principal name does NOT have the expected hostname part: "
+                  + serverPrincipal);
+        }
+        break;
+      }
+      default:
+        throw new IOException("Unknown authentication method " + method);
+    }
+    
+    String mechanism = method.getMechanismName();
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Creating SASL " + mechanism
+          + " client to authenticate to service at " + saslServerName);
     }
-    if (saslClient == null)
+    saslClient = Sasl.createSaslClient(
+        new String[] { mechanism }, saslUser, saslProtocol, saslServerName,
+        saslProperties, saslCallback);
+    if (saslClient == null) {
       throw new IOException("Unable to find SASL client implementation");
+    }
   }
 
   private static void readStatus(DataInputStream inStream) throws IOException {

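Both client branches above now funnel into one javax.security.sasl factory call. For reference, the JDK signature being used (the per-argument notes reflect how this commit fills them in):

    SaslClient Sasl.createSaslClient(
        String[] mechanisms,     // { "DIGEST-MD5" } or { "GSSAPI" }
        String authorizationId,  // saslUser, null here
        String protocol,         // Kerberos service name; null for DIGEST
        String serverName,       // default realm for DIGEST; host for KERBEROS
        Map<String, ?> props,    // SaslRpcServer.SASL_PROPS
        CallbackHandler cbh)     // token callback for DIGEST; null for KERBEROS
        throws SaslException;
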
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java Thu Nov  8 19:09:46 2012
@@ -42,7 +42,6 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -137,20 +136,17 @@ public class SaslRpcServer {
   /** Authentication method */
   @InterfaceStability.Evolving
   public static enum AuthMethod {
-    SIMPLE((byte) 80, "", AuthenticationMethod.SIMPLE),
-    KERBEROS((byte) 81, "GSSAPI", AuthenticationMethod.KERBEROS),
-    DIGEST((byte) 82, "DIGEST-MD5", AuthenticationMethod.TOKEN);
+    SIMPLE((byte) 80, ""),
+    KERBEROS((byte) 81, "GSSAPI"),
+    DIGEST((byte) 82, "DIGEST-MD5");
 
     /** The code for this method. */
     public final byte code;
     public final String mechanismName;
-    public final AuthenticationMethod authenticationMethod;
 
-    private AuthMethod(byte code, String mechanismName, 
-                       AuthenticationMethod authMethod) {
+    private AuthMethod(byte code, String mechanismName) { 
       this.code = code;
       this.mechanismName = mechanismName;
-      this.authenticationMethod = authMethod;
     }
 
     private static final int FIRST_CODE = values()[0].code;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java Thu Nov  8 19:09:46 2012
@@ -16,6 +16,8 @@
  */
 package org.apache.hadoop.security;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
@@ -44,6 +46,7 @@ import org.apache.hadoop.fs.CommonConfig
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.ssl.SSLFactory;
@@ -665,4 +668,22 @@ public class SecurityUtil {
     }
   }
 
+  public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
+    String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
+    try {
+      return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase());
+    } catch (IllegalArgumentException iae) {
+      throw new IllegalArgumentException("Invalid attribute value for " +
+          HADOOP_SECURITY_AUTHENTICATION + " of " + value);
+    }
+  }
+
+  public static void setAuthenticationMethod(
+      AuthenticationMethod authenticationMethod, Configuration conf) {
+    if (authenticationMethod == null) {
+      authenticationMethod = AuthenticationMethod.SIMPLE;
+    }
+    conf.set(HADOOP_SECURITY_AUTHENTICATION,
+             authenticationMethod.toString().toLowerCase());
+  }
 }

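A short usage sketch of the two helpers added above (values illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.SecurityUtil;
    import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;

    Configuration conf = new Configuration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf);
    // hadoop.security.authentication is now "kerberos"
    AuthenticationMethod m = SecurityUtil.getAuthenticationMethod(conf);
    assert m == AuthenticationMethod.KERBEROS;
    // an unrecognized configured value raises IllegalArgumentException
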
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java Thu Nov  8 19:09:46 2012
@@ -59,6 +59,7 @@ import org.apache.hadoop.metrics2.annota
 import org.apache.hadoop.metrics2.annotation.Metrics;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MutableRate;
+import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.authentication.util.KerberosUtil;
 import org.apache.hadoop.security.token.Token;
@@ -236,15 +237,18 @@ public class UserGroupInformation {
    * @param conf the configuration to use
    */
   private static synchronized void initUGI(Configuration conf) {
-    String value = conf.get(HADOOP_SECURITY_AUTHENTICATION);
-    if (value == null || "simple".equals(value)) {
-      useKerberos = false;
-    } else if ("kerberos".equals(value)) {
-      useKerberos = true;
-    } else {
-      throw new IllegalArgumentException("Invalid attribute value for " +
-                                         HADOOP_SECURITY_AUTHENTICATION + 
-                                         " of " + value);
+    AuthenticationMethod auth = SecurityUtil.getAuthenticationMethod(conf);
+    switch (auth) {
+      case SIMPLE:
+        useKerberos = false;
+        break;
+      case KERBEROS:
+        useKerberos = true;
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid attribute value for " +
+                                           HADOOP_SECURITY_AUTHENTICATION + 
+                                           " of " + auth);
     }
     try {
         kerberosMinSecondsBeforeRelogin = 1000L * conf.getLong(
@@ -636,19 +640,20 @@ public class UserGroupInformation {
       try {
         Subject subject = new Subject();
         LoginContext login;
+        AuthenticationMethod authenticationMethod;
         if (isSecurityEnabled()) {
+          authenticationMethod = AuthenticationMethod.KERBEROS;
           login = newLoginContext(HadoopConfiguration.USER_KERBEROS_CONFIG_NAME,
               subject, new HadoopConfiguration());
         } else {
+          authenticationMethod = AuthenticationMethod.SIMPLE;
           login = newLoginContext(HadoopConfiguration.SIMPLE_CONFIG_NAME, 
               subject, new HadoopConfiguration());
         }
         login.login();
         loginUser = new UserGroupInformation(subject);
         loginUser.setLogin(login);
-        loginUser.setAuthenticationMethod(isSecurityEnabled() ?
-                                          AuthenticationMethod.KERBEROS :
-                                          AuthenticationMethod.SIMPLE);
+        loginUser.setAuthenticationMethod(authenticationMethod);
         loginUser = new UserGroupInformation(login.getSubject());
         String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION);
         if (fileLocation != null) {
@@ -1019,13 +1024,34 @@ public class UserGroupInformation {
   @InterfaceAudience.Public
   @InterfaceStability.Evolving
   public static enum AuthenticationMethod {
-    SIMPLE,
-    KERBEROS,
-    TOKEN,
-    CERTIFICATE,
-    KERBEROS_SSL,
-    PROXY;
-  }
+    // currently we support only one auth per method, but eventually a 
+    // subtype is needed to differentiate, ex. if digest is token or ldap
+    SIMPLE(AuthMethod.SIMPLE),
+    KERBEROS(AuthMethod.KERBEROS),
+    TOKEN(AuthMethod.DIGEST),
+    CERTIFICATE(null),
+    KERBEROS_SSL(null),
+    PROXY(null);
+    
+    private final AuthMethod authMethod;
+    private AuthenticationMethod(AuthMethod authMethod) {
+      this.authMethod = authMethod;
+    }
+    
+    public AuthMethod getAuthMethod() {
+      return authMethod;
+    }
+    
+    public static AuthenticationMethod valueOf(AuthMethod authMethod) {
+      for (AuthenticationMethod value : values()) {
+        if (value.getAuthMethod() == authMethod) {
+          return value;
+        }
+      }
+      throw new IllegalArgumentException(
+          "no authentication method for " + authMethod);
+    }
+  };
 
   /**
    * Create a proxy user using username of the effective user and the ugi of the
@@ -1291,6 +1317,15 @@ public class UserGroupInformation {
   }
 
   /**
+   * Sets the authentication method in the subject
+   * 
+   * @param authMethod
+   */
+  public void setAuthenticationMethod(AuthMethod authMethod) {
+    user.setAuthenticationMethod(AuthenticationMethod.valueOf(authMethod));
+  }
+
+  /**
    * Get the authentication method from the subject
    * 
    * @return AuthenticationMethod in the subject, null if not present.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Thu Nov  8 19:09:46 2012
@@ -254,7 +254,11 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 
   int err = 0;
   if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, flags))) {
+#ifdef __FreeBSD__
+    throw_ioe(env, errno);
+#else
     throw_ioe(env, err);
+#endif
   }
 #endif
 }
@@ -310,6 +314,22 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #endif
 }
 
+#ifdef __FreeBSD__
+static int toFreeBSDFlags(int flags)
+{
+  int rc = flags & 03;
+  if ( flags &  0100 ) rc |= O_CREAT;
+  if ( flags &  0200 ) rc |= O_EXCL;
+  if ( flags &  0400 ) rc |= O_NOCTTY;
+  if ( flags & 01000 ) rc |= O_TRUNC;
+  if ( flags & 02000 ) rc |= O_APPEND;
+  if ( flags & 04000 ) rc |= O_NONBLOCK;
+  if ( flags &010000 ) rc |= O_SYNC;
+  if ( flags &020000 ) rc |= O_ASYNC;
+  return rc;
+}
+#endif
+
 /*
  * public static native FileDescriptor open(String path, int flags, int mode);
  */
@@ -318,6 +338,9 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jstring j_path,
   jint flags, jint mode)
 {
+#ifdef __FreeBSD__
+  flags = toFreeBSDFlags(flags);
+#endif
   jobject ret = NULL;
 
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
@@ -399,7 +422,7 @@ err:
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
 ssize_t get_pw_buflen() {
-  size_t ret = 0;
+  long ret = 0;
   #ifdef _SC_GETPW_R_SIZE_MAX
   ret = sysconf(_SC_GETPW_R_SIZE_MAX);
   #endif
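
For context: the flag remapping above matters because the Java side publishes
Linux-valued O_* flag bits, and FreeBSD assigns different numeric values to the
same flags; the native open() now translates before calling open(2). A hedged
sketch of a Java caller, assuming NativeIO exposes the Linux-valued O_*
constants alongside the open(path, flags, mode) entry point shown above (the
path is hypothetical):

    import java.io.FileDescriptor;
    import java.io.IOException;
    import org.apache.hadoop.io.nativeio.NativeIO;

    public class NativeOpenSketch {
      public static void main(String[] args) throws IOException {
        // The flag bits carry Linux's octal values (O_CREAT = 0100, etc.);
        // with this patch the FreeBSD native layer remaps them first.
        FileDescriptor fd = NativeIO.open("/tmp/demo", // hypothetical path
            NativeIO.O_WRONLY | NativeIO.O_CREAT, 0644);
      }
    }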

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c Thu Nov  8 19:09:46 2012
@@ -46,6 +46,7 @@ JNIEXPORT jobjectArray JNICALL 
 Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNetgroupJNI
 (JNIEnv *env, jobject jobj, jstring jgroup) {
   UserList *userListHead = NULL;
+  UserList *current = NULL;
   int       userListSize = 0;
 
   // pointers to free at the end
@@ -72,8 +73,10 @@ Java_org_apache_hadoop_security_JniBased
   // was successful or not (as long as it was called we need to call
   // endnetgrent)
   setnetgrentCalledFlag = 1;
+#ifndef __FreeBSD__
   if(setnetgrent(cgroup) == 1) {
-    UserList *current = NULL;
+#endif
+    current = NULL;
     // three pointers are for host, user, domain, we only care
     // about user now
     char *p[3];
@@ -87,7 +90,9 @@ Java_org_apache_hadoop_security_JniBased
         userListSize++;
       }
     }
+#ifndef __FreeBSD__
   }
+#endif
 
   //--------------------------------------------------
   // build return data (java array)
@@ -101,7 +106,7 @@ Java_org_apache_hadoop_security_JniBased
     goto END;
   }
 
-  UserList * current = NULL;
+  current = NULL;
 
   // note that the loop iterates over list but also over array (i)
   int i = 0;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c Thu Nov  8 19:09:46 2012
@@ -78,7 +78,7 @@ int getGroupIDList(const char *user, int
  */
 int getGroupDetails(gid_t group, char **grpBuf) {
   struct group * grp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }
@@ -123,7 +123,7 @@ int getGroupDetails(gid_t group, char **
  */
 int getPW(const char *user, char **pwbuf) {
   struct passwd *pwbufp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Thu Nov  8 19:09:46 2012
@@ -32,7 +32,9 @@
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
+#ifndef __FreeBSD__
 #define USE_PIPELINED
+#endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
@@ -260,7 +262,7 @@ static uint32_t crc32_zlib_sb8(
 // Begin code for SSE4.2 specific hardware support of CRC32C
 ///////////////////////////////////////////////////////////////////////////
 
-#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__)
+#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__) && !defined(__FreeBSD__)
 #  define SSE42_FEATURE_BIT (1 << 20)
 #  define CPUID_FEATURES 1
 /**

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/HAServiceProtocol.proto Thu Nov  8 19:09:46 2012
@@ -20,6 +20,7 @@ option java_package = "org.apache.hadoop
 option java_outer_classname = "HAServiceProtocolProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 enum HAServiceStateProto {
   INITIALIZING = 0;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/IpcConnectionContext.proto Thu Nov  8 19:09:46 2012
@@ -18,6 +18,7 @@
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "IpcConnectionContextProtos";
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 /**
  * Spec for UserInformationProto is specified in ProtoUtil#makeIpcConnectionContext

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ProtocolInfo.proto Thu Nov  8 19:09:46 2012
@@ -20,6 +20,7 @@ option java_package = "org.apache.hadoop
 option java_outer_classname = "ProtocolInfoProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 /**
  * Request to get protocol versions for all supported rpc kinds.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/RpcPayloadHeader.proto Thu Nov  8 19:09:46 2012
@@ -18,6 +18,7 @@
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "RpcPayloadHeaderProtos";
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 /**
  * This is the rpc payload header. It is sent with every rpc call.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/ZKFCProtocol.proto Thu Nov  8 19:09:46 2012
@@ -20,6 +20,7 @@ option java_package = "org.apache.hadoop
 option java_outer_classname = "ZKFCProtocolProtos";
 option java_generic_services = true;
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 message CedeActiveRequestProto {
   required uint32 millisToCede = 1;

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto Thu Nov  8 19:09:46 2012
@@ -23,6 +23,7 @@
 option java_package = "org.apache.hadoop.ipc.protobuf";
 option java_outer_classname = "HadoopRpcProtos";
 option java_generate_equals_and_hash = true;
+package hadoop.common;
 
 /**
  * This message is used for Protobuf Rpc Engine.
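
For context: all six .proto changes in this commit add the same
"package hadoop.common;" declaration. That namespaces the protobuf type names
at the descriptor/wire level without moving the generated Java classes, which
remain governed by java_package and java_outer_classname, so existing Java
callers compile unchanged. A sketch of the observable effect, assuming the
truncated java_package option above resolves to org.apache.hadoop.ha.proto:

    import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceStateProto;

    public class ProtoPackageSketch {
      public static void main(String[] args) {
        // The Java class location is unchanged; only the descriptor's
        // fully qualified name gains the "hadoop.common." prefix.
        System.out.println(HAServiceStateProto.INITIALIZING
            .getDescriptorForType().getFullName());
        // expected: hadoop.common.HAServiceStateProto
      }
    }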

Propchange: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1401063-1407201

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Thu Nov  8 19:09:46 2012
@@ -61,19 +61,28 @@ public final class FileSystemTestHelper 
     return data;
   }
   
+  
+  /*
+   * get testRootPath qualified for fSys
+   */
   public static Path getTestRootPath(FileSystem fSys) {
     return fSys.makeQualified(new Path(TEST_ROOT_DIR));
   }
 
+  /*
+   * get testRootPath + pathString qualified for fSys
+   */
   public static Path getTestRootPath(FileSystem fSys, String pathString) {
     return fSys.makeQualified(new Path(TEST_ROOT_DIR, pathString));
   }
   
   
   // the getAbsolutexxx method is needed because the root test dir
-  // can be messed up by changing the working dir.
+  // can be messed up by changing the working dir, since TEST_ROOT_DIR
+  // is often relative to the working directory of the process
+  // running the unit tests.
 
-  public static String getAbsoluteTestRootDir(FileSystem fSys)
+  static String getAbsoluteTestRootDir(FileSystem fSys)
       throws IOException {
     // NOTE: can't cache because of different filesystems!
     //if (absTestRootDir == null) 
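
The two getTestRootPath overloads documented above qualify the shared test
root against a particular FileSystem. A short usage sketch (the child path
"data/file1" is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FileSystemTestHelper;
    import org.apache.hadoop.fs.Path;

    public class TestRootSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        // TEST_ROOT_DIR qualified with fs's scheme/authority
        Path root = FileSystemTestHelper.getTestRootPath(fs);
        // <root>/data/file1, similarly qualified
        Path file = FileSystemTestHelper.getTestRootPath(fs, "data/file1");
        System.out.println(root + " / " + file);
      }
    }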

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java Thu Nov  8 19:09:46 2012
@@ -350,8 +350,12 @@ public class TestHardLink {
     callCount = createHardLinkMult(src, fileNames, tgt_mult, maxLength);
     //check the request was completed in exactly two "chunks"
     assertEquals(2, callCount);
+    String[] tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
     //and check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    assertArrayEquals(fileNames, tgt_multNames);
     
     //Test the case where maxlength is too small even for one filename.
     //It should go ahead and try the single files.
@@ -368,8 +372,12 @@ public class TestHardLink {
         maxLength);
     //should go ahead with each of the three single file names
     assertEquals(3, callCount);
-    //check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
+    //and check the results were as expected in the dir tree
+    assertArrayEquals(fileNames, tgt_multNames);
   }
   
   /*
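
The fix above exists because File.list() makes no ordering guarantee, so a
deep-equals against an expected array is order-dependent and flaky; sorting
both sides first makes the comparison order-insensitive. A minimal standalone
sketch of the pattern (the directory path is hypothetical):

    import java.io.File;
    import java.util.Arrays;

    public class SortedListingSketch {
      public static void main(String[] args) {
        String[] expected = { "x.3", "x.1", "x.2" };
        String[] actual = new File("/some/dir").list(); // hypothetical dir
        // Sort both arrays so the comparison ignores listing order.
        Arrays.sort(expected);
        Arrays.sort(actual);
        System.out.println(Arrays.equals(expected, actual));
      }
    }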

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java Thu Nov  8 19:09:46 2012
@@ -249,6 +249,7 @@ public class TestLocalFileSystem {
     assertEquals(1, fileSchemeCount);
   }
 
+  @Test
   public void testHasFileDescriptor() throws IOException {
     Configuration conf = new Configuration();
     LocalFileSystem fs = FileSystem.getLocal(conf);
@@ -258,4 +259,17 @@ public class TestLocalFileSystem {
         new RawLocalFileSystem().new LocalFSFileInputStream(path), 1024);
     assertNotNull(bis.getFileDescriptor());
   }
+
+  @Test
+  public void testListStatusWithColons() throws IOException {
+    Configuration conf = new Configuration();
+    LocalFileSystem fs = FileSystem.getLocal(conf);
+    File colonFile = new File(TEST_ROOT_DIR, "foo:bar");
+    colonFile.mkdirs();
+    colonFile.createNewFile();
+    FileStatus[] stats = fs.listStatus(new Path(TEST_ROOT_DIR));
+    assertEquals("Unexpected number of stats", 1, stats.length);
+    assertEquals("Bad path from stat", colonFile.getAbsolutePath(),
+        stats[0].getPath().toUri().getPath());
+  }
 }
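
The new test guards the colon case because Path parses a leading "name:" as a
URI scheme, so a child file name containing a colon used to break listStatus.
A sketch of the failure mode being guarded against (paths are illustrative,
and the exact exception text may vary by version):

    import org.apache.hadoop.fs.Path;

    public class ColonPathSketch {
      public static void main(String[] args) {
        try {
          // "foo:bar" parses as scheme "foo" plus relative path "bar",
          // which this constructor rejects.
          new Path(new Path("/tmp/parent"), "foo:bar");
        } catch (IllegalArgumentException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }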

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFileSystem.java Thu Nov  8 19:09:46 2012
@@ -73,10 +73,10 @@ public class TestChRootedFileSystem {
     URI uri = fSys.getUri();
     Assert.assertEquals(chrootedTo.toUri(), uri);
     Assert.assertEquals(fSys.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fSys.getWorkingDirectory());
     Assert.assertEquals(fSys.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fSys.getHomeDirectory());
     /*
      * ChRootedFs has a uri like file:///chrootRoot.
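
Here, and identically in TestChRootedFs below, the expected working and home
directories switch from the hard-coded "/user/<user.name>" to the JVM's
user.home property, presumably tracking a local-filesystem home-directory
change elsewhere in this merge. The shift in expectations, as a sketch:

    public class HomeDirSketch {
      public static void main(String[] args) {
        String oldExpected = "/user/" + System.getProperty("user.name");
        // e.g. /home/alice on Linux, /Users/alice on Mac
        String newExpected = System.getProperty("user.home");
        System.out.println(oldExpected + " -> " + newExpected);
      }
    }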

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestChRootedFs.java Thu Nov  8 19:09:46 2012
@@ -70,10 +70,10 @@ public class TestChRootedFs {
     URI uri = fc.getDefaultFileSystem().getUri();
     Assert.assertEquals(chrootedTo.toUri(), uri);
     Assert.assertEquals(fc.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fc.getWorkingDirectory());
     Assert.assertEquals(fc.makeQualified(
-        new Path("/user/" + System.getProperty("user.name"))),
+        new Path(System.getProperty("user.home"))),
         fc.getHomeDirectory());
     /*
      * ChRootedFs has a uri like file:///chrootRoot.

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFcMainOperationsLocalFs.java Thu Nov  8 19:09:46 2012
@@ -39,44 +39,7 @@ public class TestFcMainOperationsLocalFs
   @Override
   @Before
   public void setUp() throws Exception {
-    /**
-     * create the test root on local_fs - the  mount table will point here
-     */
-    fclocal = FileContext.getLocalFSFileContext();
-    targetOfTests = FileContextTestHelper.getTestRootPath(fclocal);
-    // In case previous test was killed before cleanup
-    fclocal.delete(targetOfTests, true);
-    
-    fclocal.mkdir(targetOfTests, FileContext.DEFAULT_PERM, true);
-
-    
-    
-    
-    // We create mount table so that the test root on the viewFs points to 
-    // to the test root on the target.
-    // DOing this helps verify the FileStatus.path.
-    //
-    // The test root by default when running eclipse 
-    // is a test dir below the working directory. 
-    // (see FileContextTestHelper).
-    // Since viewFs has no built-in wd, its wd is /user/<username>.
-    // If this test launched via ant (build.xml) the test root is absolute path
-    
-    String srcTestRoot;
-    if (FileContextTestHelper.TEST_ROOT_DIR.startsWith("/")) {
-      srcTestRoot = FileContextTestHelper.TEST_ROOT_DIR;
-    } else {
-      srcTestRoot = "/user/"  + System.getProperty("user.name") + "/" +
-      FileContextTestHelper.TEST_ROOT_DIR;
-    }
-
-    Configuration conf = new Configuration();
-    ConfigUtil.addLink(conf, srcTestRoot,
-        targetOfTests.toUri());
-    
-    fc = FileContext.getFileContext(FsConstants.VIEWFS_URI, conf);
-    //System.out.println("SRCOfTests = "+ FileContextTestHelper.getTestRootPath(fc, "test"));
-    //System.out.println("TargetOfTests = "+ targetOfTests.toUri());
+    fc = ViewFsTestSetup.setupForViewFsLocalFs();
     super.setUp();
   }
   
@@ -84,6 +47,6 @@ public class TestFcMainOperationsLocalFs
   @After
   public void tearDown() throws Exception {
     super.tearDown();
-    fclocal.delete(targetOfTests, true);
+    ViewFsTestSetup.tearDownForViewFsLocalFs();
   }
-}
\ No newline at end of file
+}
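
The setup boilerplate deleted above is now centralized in ViewFsTestSetup. A
test that needs a viewfs-over-localfs FileContext pairs the two helpers like
this (a sketch; the try/finally stands in for the @Before/@After pairing):

    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.viewfs.ViewFsTestSetup;

    public class ViewFsSetupSketch {
      public static void main(String[] args) throws Exception {
        FileContext fc = ViewFsTestSetup.setupForViewFsLocalFs();
        try {
          System.out.println(fc.getWorkingDirectory());
        } finally {
          ViewFsTestSetup.tearDownForViewFsLocalFs();
        }
      }
    }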

Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java?rev=1407217&r1=1407216&r2=1407217&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/ViewFileSystemTestSetup.java Thu Nov  8 19:09:46 2012
@@ -17,7 +17,10 @@
  */
 package org.apache.hadoop.fs.viewfs;
 
+import java.net.URI;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.FsConstants;
@@ -32,14 +35,19 @@ import org.mortbay.log.Log;
  * 
  * If tests launched via ant (build.xml) the test root is absolute path
  * If tests launched via eclipse, the test root is 
- * is a test dir below the working directory. (see FileSystemTestHelper).
- * Since viewFs has no built-in wd, its wd is /user/<username> 
- *          (or /User/<username> on mac)
+ * a test dir below the working directory (see FileContextTestHelper).
+ * 
+ * We set up a viewFileSystem with 3 mount points:
+ * 1) /<firstComponent> of testdir pointing to the same in the target fs
+ * 2) /<firstComponent> of home pointing to the same in the target fs
+ * 3) /<firstComponent> of wd pointing to the same in the target fs
+ * (note in many cases the links may be the same - viewFileSystem handles this)
  * 
- * We set a viewFileSystems with mount point for 
- * /<firstComponent>" pointing to the target fs's  testdir 
+ * We also set the viewfs working directory to match the target's working directory.
  */
 public class ViewFileSystemTestSetup {
+  
+  static public String ViewFSTestDir = "/testDir";
 
   /**
    * 
@@ -56,24 +64,26 @@ public class ViewFileSystemTestSetup {
     fsTarget.delete(targetOfTests, true);
     fsTarget.mkdirs(targetOfTests);
 
-    // Setup a link from viewfs to targetfs for the first component of
-    // path of testdir.
+
+    // Set up viewfs link for test dir as described above
     String testDir = FileSystemTestHelper.getTestRootPath(fsTarget).toUri()
         .getPath();
-    int indexOf2ndSlash = testDir.indexOf('/', 1);
-    String testDirFirstComponent = testDir.substring(0, indexOf2ndSlash);
-    ConfigUtil.addLink(conf, testDirFirstComponent, fsTarget.makeQualified(
-        new Path(testDirFirstComponent)).toUri());
-
-    // viewFs://home => fsTarget://home
-    String homeDirRoot = fsTarget.getHomeDirectory()
-        .getParent().toUri().getPath();
-    ConfigUtil.addLink(conf, homeDirRoot,
-        fsTarget.makeQualified(new Path(homeDirRoot)).toUri());
-    ConfigUtil.setHomeDirConf(conf, homeDirRoot);
-    Log.info("Home dir base " + homeDirRoot);
+    linkUpFirstComponents(conf, testDir, fsTarget, "test dir");
+    
+    
+    // Set up viewfs link for home dir as described above
+    setUpHomeDir(conf, fsTarget);
+    
+    
+    // the test path may be relative to working dir - we need to make that work:
+    // Set up viewfs link for wd as described above
+    String wdDir = fsTarget.getWorkingDirectory().toUri().getPath();
+    linkUpFirstComponents(conf, wdDir, fsTarget, "working dir");
+
 
     FileSystem fsView = FileSystem.get(FsConstants.VIEWFS_URI, conf);
+    fsView.setWorkingDirectory(new Path(wdDir)); // in case testdir is relative to wd.
+    Log.info("Working dir is: " + fsView.getWorkingDirectory());
     return fsView;
   }
 
@@ -91,4 +101,33 @@ public class ViewFileSystemTestSetup {
     conf.set("fs.viewfs.impl", ViewFileSystem.class.getName());
     return conf; 
   }
+  
+  static void setUpHomeDir(Configuration conf, FileSystem fsTarget) {
+    String homeDir = fsTarget.getHomeDirectory().toUri().getPath();
+    int indexOf2ndSlash = homeDir.indexOf('/', 1);
+    if (indexOf2ndSlash > 0) {
+      linkUpFirstComponents(conf, homeDir, fsTarget, "home dir");
+    } else { // home dir is at root. Just link the home dir itself
+      URI linkTarget = fsTarget.makeQualified(new Path(homeDir)).toUri();
+      ConfigUtil.addLink(conf, homeDir, linkTarget);
+      Log.info("Added link for home dir " + homeDir + "->" + linkTarget);
+    }
+    // Now set the root of the home dir for viewfs
+    String homeDirRoot = fsTarget.getHomeDirectory().getParent().toUri().getPath();
+    ConfigUtil.setHomeDirConf(conf, homeDirRoot);
+    Log.info("Home dir base for viewfs" + homeDirRoot);  
+  }
+  
+  /*
+   * Set up link in config for first component of path to the same
+   * in the target file system.
+   */
+  static void linkUpFirstComponents(Configuration conf, String path, FileSystem fsTarget, String info) {
+    int indexOf2ndSlash = path.indexOf('/', 1);
+    String firstComponent = path.substring(0, indexOf2ndSlash);
+    URI linkTarget = fsTarget.makeQualified(new Path(firstComponent)).toUri();
+    ConfigUtil.addLink(conf, firstComponent, linkTarget);
+    Log.info("Added link for " + info + " " 
+        + firstComponent + "->" + linkTarget);    
+  }
 }
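
A worked example of linkUpFirstComponents' slicing, assuming absolute input
paths as in the code above (the path itself is illustrative):

    public class FirstComponentSketch {
      public static void main(String[] args) {
        String path = "/home/alice/target/test";     // illustrative path
        int indexOf2ndSlash = path.indexOf('/', 1);  // -> 5
        // substring(0, 5) -> "/home", which becomes the viewfs mount point
        String firstComponent = path.substring(0, indexOf2ndSlash);
        System.out.println(firstComponent);
      }
    }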