Posted to common-commits@hadoop.apache.org by su...@apache.org on 2013/03/06 20:15:22 UTC

svn commit: r1453486 [6/7] - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/bin/ src/main/conf/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/org/apache/hadoop/fs/ src/main/java/org/apache/hadoop/...

Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln Wed Mar  6 19:15:18 2013
@@ -0,0 +1,55 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
+	ProjectSection(ProjectDependencies) = postProject
+		{12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
+	EndProjectSection
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
+EndProject
+Global
+	GlobalSection(SolutionConfigurationPlatforms) = preSolution
+		Debug|Win32 = Debug|Win32
+		Debug|x64 = Debug|x64
+		Release|Win32 = Release|Win32
+		Release|x64 = Release|x64
+	EndGlobalSection
+	GlobalSection(ProjectConfigurationPlatforms) = postSolution
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
+		{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
+		{12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
+	EndGlobalSection
+	GlobalSection(SolutionProperties) = preSolution
+		HideSolutionNode = FALSE
+	EndGlobalSection
+EndGlobal

Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj Wed Mar  6 19:15:18 2013
@@ -0,0 +1,181 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Debug|Win32">
+      <Configuration>Debug</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|x64">
+      <Configuration>Debug</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|Win32">
+      <Configuration>Release</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|x64">
+      <Configuration>Release</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}</ProjectGuid>
+    <Keyword>Win32Proj</Keyword>
+    <RootNamespace>winutils</RootNamespace>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <UseDebugLibraries>true</UseDebugLibraries>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <UseDebugLibraries>true</UseDebugLibraries>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <UseDebugLibraries>false</UseDebugLibraries>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <CharacterSet>Unicode</CharacterSet>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <PropertyGroup>
+    <IncludePath>include;$(IncludePath)</IncludePath>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <LinkIncremental>true</LinkIncremental>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <LinkIncremental>true</LinkIncremental>
+    <OutDir />
+    <IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <LinkIncremental>false</LinkIncremental>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <LinkIncremental>false</LinkIncremental>
+    <IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
+    <OutDir>..\..\..\target\bin\</OutDir>
+  </PropertyGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <ClCompile>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <Optimization>Disabled</Optimization>
+      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Console</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <ClCompile>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level4</WarningLevel>
+      <Optimization>Disabled</Optimization>
+      <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Console</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <Optimization>MaxSpeed</Optimization>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Console</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <ClCompile>
+      <WarningLevel>Level3</WarningLevel>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <Optimization>MaxSpeed</Optimization>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <IntrinsicFunctions>true</IntrinsicFunctions>
+      <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+    </ClCompile>
+    <Link>
+      <SubSystem>Console</SubSystem>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <OptimizeReferences>true</OptimizeReferences>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+    <ClCompile Include="symlink.c" />
+    <ClCompile Include="systeminfo.c" />
+    <ClCompile Include="chmod.c" />
+    <ClCompile Include="chown.c" />
+    <ClCompile Include="groups.c" />
+    <ClCompile Include="hardlink.c" />
+    <ClCompile Include="task.c" />
+    <ClCompile Include="ls.c" />
+    <ClCompile Include="main.c" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="libwinutils.vcxproj">
+      <Project>{12131aa7-902e-4a6d-9ce3-043261d22a12}</Project>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>
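
For context, the project above compiles the listed .c sources into winutils.exe (each source roughly corresponds to one subcommand: ls, chmod, chown, groups, hardlink, symlink, systeminfo, task; main.c is the dispatcher) and links against libwinutils through the ProjectReference. From a Visual Studio 2010 command prompt the solution can presumably be built with something like msbuild winutils.sln /p:Configuration=Release /p:Platform=x64, though the Maven integration is not part of this hunk. Below is a minimal, illustrative Java smoke test of the resulting binary using Shell.execCommand; the winutils path is a placeholder, not something defined by this commit.

  import org.apache.hadoop.util.Shell;

  public class WinutilsSmokeTest {
    public static void main(String[] args) throws Exception {
      // Placeholder location; the Release|x64 OutDir above is ..\..\..\target\bin,
      // so the binary typically lands under the module's target\bin directory.
      String winutils =
          "C:\\hadoop\\hadoop-common-project\\hadoop-common\\target\\bin\\winutils.exe";

      // "systeminfo" maps to systeminfo.c above and prints basic machine info.
      System.out.println(Shell.execCommand(winutils, "systeminfo"));
    }
  }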

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm Wed Mar  6 19:15:18 2013
@@ -33,9 +33,7 @@ Single Node Setup
      * GNU/Linux is supported as a development and production platform.
        Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
 
-     * Win32 is supported as a development platform. Distributed operation
-       has not been well tested on Win32, so it is not supported as a
-       production platform.
+     * Windows is also a supported platform.
 
 ** Required Software
 
@@ -46,11 +44,6 @@ Single Node Setup
     [[2]] ssh must be installed and sshd must be running to use the Hadoop
        scripts that manage remote Hadoop daemons.
 
-   Additional requirements for Windows include:
-
-    [[1]] Cygwin - Required for shell support in addition to the required
-       software above.
-
 ** Installing Software
 
    If your cluster doesn't have the requisite software you will need to
@@ -63,11 +56,6 @@ Single Node Setup
    $ sudo apt-get install rsync
 ----
 
-   On Windows, if you did not install the required software when you
-   installed cygwin, start the cygwin installer and select the packages:
-
-     * openssh - the Net category
-
 * Download
 
    To get a Hadoop distribution, download a recent stable release from one

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java Wed Mar  6 19:15:18 2013
@@ -68,7 +68,7 @@ public final class FileContextTestHelper
   public static String getAbsoluteTestRootDir(FileContext fc)
       throws IOException {
     if (absTestRootDir == null) {
-      if (TEST_ROOT_DIR.startsWith("/")) {
+      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fc.getWorkingDirectory().toString() + "/"
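
The startsWith("/") check above only recognizes Unix-style absolute paths; on Windows the test root is typically something like C:/tmp/build/test/data, which the old check treated as relative and then concatenated onto the working directory. A small standalone illustration of the difference (the literal paths are examples, not values used by the test):

  import org.apache.hadoop.fs.Path;

  public class AbsoluteRootCheck {
    public static void main(String[] args) {
      String winRoot = "C:/tmp/build/test/data";
      String unixRoot = "/tmp/build/test/data";

      // The old check misses drive-letter paths entirely.
      System.out.println(winRoot.startsWith("/"));    // false
      System.out.println(unixRoot.startsWith("/"));   // true

      // Path understands both forms; the first line prints true when run on Windows.
      System.out.println(new Path(winRoot).isAbsolute());
      System.out.println(new Path(unixRoot).isAbsolute());
    }
  }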

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java Wed Mar  6 19:15:18 2013
@@ -20,9 +20,11 @@ package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.ArrayList;
+import java.util.regex.Pattern;
 import junit.framework.Assert;
 
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -52,6 +54,12 @@ public abstract class FileContextURIBase
   private static final String basePath = System.getProperty("test.build.data",
   "build/test/data") + "/testContextURI";
   private static final Path BASE = new Path(basePath);
+
+  // Matches anything containing <, >, :, ", |, ?, *, or anything that ends with
+  // space or dot.
+  private static final Pattern WIN_INVALID_FILE_NAME_PATTERN = Pattern.compile(
+    "^(.*?[<>\\:\"\\|\\?\\*].*?)|(.*?[ \\.])$");
+
   protected FileContext fc1;
   protected FileContext fc2;
 
@@ -81,6 +89,10 @@ public abstract class FileContextURIBase
         "  ", "^ " };
 
     for (String f : fileNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -205,6 +217,10 @@ public abstract class FileContextURIBase
         "deleteTest/()&^%$#@!~_+}{><?", "  ", "^ " };
 
     for (String f : dirNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -374,6 +390,10 @@ public abstract class FileContextURIBase
         "deleteTest/()&^%$#@!~_+}{><?", "  ", "^ " };
 
     for (String f : dirNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -492,6 +512,10 @@ public abstract class FileContextURIBase
     ArrayList<Path> testDirs = new ArrayList<Path>();
 
     for (String d : dirs) {
+      if (!isTestableFileNameOnPlatform(d)) {
+        continue;
+      }
+
       testDirs.add(qualifiedPath(d, fc2));
     }
     Assert.assertFalse(exists(fc1, testDirs.get(0)));
@@ -506,15 +530,17 @@ public abstract class FileContextURIBase
     Assert.assertEquals(qualifiedPath(hPrefix, fc1), paths[0].getPath());
 
     paths = fc1.util().listStatus(qualifiedPath(hPrefix, fc1));
-    Assert.assertEquals(6, paths.length);
-    for (int i = 0; i < dirs.length; i++) {
+    Assert.assertEquals(testDirs.size(), paths.length);
+    for (int i = 0; i < testDirs.size(); i++) {
       boolean found = false;
       for (int j = 0; j < paths.length; j++) {
-        if (qualifiedPath(dirs[i],fc1).equals(paths[j].getPath())) {
+        if (qualifiedPath(testDirs.get(i).toString(), fc1).equals(
+            paths[j].getPath())) {
+
           found = true;
         }
       }
-      Assert.assertTrue(dirs[i] + " not found", found);
+      Assert.assertTrue(testDirs.get(i) + " not found", found);
     }
 
     paths = fc1.util().listStatus(qualifiedPath(dirs[0], fc1));
@@ -539,9 +565,32 @@ public abstract class FileContextURIBase
       }
       Assert.assertTrue(stat.getPath() + " not found", found);
     }
-    Assert.assertEquals(6, dirLen);
+    Assert.assertEquals(testDirs.size(), dirLen);
 
     pathsItor = fc1.listStatus(qualifiedPath(dirs[0], fc1));
     Assert.assertFalse(pathsItor.hasNext());
   }
+
+  /**
+   * Returns true if the argument is a file name that is testable on the platform
+   * currently running the test.  This is intended for use by tests so that they
+   * can skip checking file names that aren't supported by the underlying
+   * platform.  The current implementation specifically checks for patterns that
+   * are not valid file names on Windows when the tests are running on Windows.
+   * 
+   * @param fileName String file name to check
+   * @return boolean true if the argument is valid as a file name
+   */
+  private static boolean isTestableFileNameOnPlatform(String fileName) {
+    boolean valid = true;
+
+    if (Shell.WINDOWS) {
+      // Disallow reserved characters: <, >, :, ", |, ?, *.
+      // Disallow trailing space or period.
+      // See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
+      valid = !WIN_INVALID_FILE_NAME_PATTERN.matcher(fileName).matches();
+    }
+
+    return valid;
+  }
 }
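
The skipping logic above only filters names when the test runs on Windows; everywhere else the full list of special-character names is still exercised. A standalone sketch of how the pattern classifies a few names (the pattern string is copied from the diff; the sample names are arbitrary):

  import java.util.regex.Pattern;

  public class WinFileNameFilterDemo {
    // Same expression as WIN_INVALID_FILE_NAME_PATTERN above.
    private static final Pattern WIN_INVALID = Pattern.compile(
        "^(.*?[<>\\:\"\\|\\?\\*].*?)|(.*?[ \\.])$");

    public static void main(String[] args) {
      String[] names = { "plain.txt", "question?mark", "trailing.dot.", "trailing space " };
      for (String name : names) {
        // matches() == true means the name is invalid on Windows and would be skipped.
        System.out.println(name + " -> " +
            (WIN_INVALID.matcher(name).matches() ? "skipped on Windows" : "testable"));
      }
    }
  }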

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Wed Mar  6 19:15:18 2013
@@ -86,7 +86,7 @@ public final class FileSystemTestHelper 
       throws IOException {
     // NOTE: can't cache because of different filesystems!
     //if (absTestRootDir == null) 
-      if (TEST_ROOT_DIR.startsWith("/")) {
+      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fSys.getWorkingDirectory().toString() + "/"

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java Wed Mar  6 19:15:18 2013
@@ -43,13 +43,14 @@ public class TestFileContextResolveAfs {
     fc = FileContext.getFileContext();
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFileContextResolveAfs() throws IOException {
     Configuration conf = new Configuration();
     localFs = FileSystem.get(conf);
     
     Path localPath = new Path(TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs1");
-    Path linkPath = new Path("file://" + TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs2");
+    Path linkPath = localFs.makeQualified(new Path(TEST_ROOT_DIR_LOCAL,
+      "TestFileContextResolveAfs2"));
     localFs.mkdirs(new Path(TEST_ROOT_DIR_LOCAL));
     localFs.create(localPath);
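
Building the link path by prepending "file://" to a local directory string is fragile on Windows, where a drive-letter path such as C:/dir ends up in the URI authority position (file://C:/dir) rather than in the path; qualifying the path through the local file system avoids hand-assembling the URI. A hedged sketch of the safer pattern used above (directory names are illustrative):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class QualifyLocalPath {
    public static void main(String[] args) throws Exception {
      FileSystem localFs = FileSystem.getLocal(new Configuration());

      // Let the file system supply the scheme (and authority, where relevant)
      // instead of concatenating "file://" with a platform-specific string.
      Path qualified = localFs.makeQualified(new Path("build/test/data", "sample"));
      System.out.println(qualified);  // e.g. file:/home/user/src/build/test/data/sample
    }
  }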
     

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Wed Mar  6 19:15:18 2013
@@ -20,16 +20,24 @@ package org.apache.hadoop.fs;
 import org.junit.Before;
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.jar.Attributes;
+import java.util.jar.JarFile;
+import java.util.jar.Manifest;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -121,7 +129,7 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListFiles() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -148,7 +156,7 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListAPI() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -196,7 +204,7 @@ public class TestFileUtil {
     Assert.assertTrue(!partitioned.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDelete() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDelete(del);
@@ -211,7 +219,7 @@ public class TestFileUtil {
    * (b) symlink to dir only and not the dir pointed to by symlink.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteSymlinks() throws IOException {
     setupDirs();
     
@@ -241,7 +249,7 @@ public class TestFileUtil {
    * (b) dangling symlink to directory properly
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteDanglingSymlinks() throws IOException {
     setupDirs();
     // delete the directory tmp to make tmpDir a dangling link to dir tmp and
@@ -268,7 +276,7 @@ public class TestFileUtil {
     Assert.assertEquals(3, del.list().length);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteContents() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDeleteContents(del);
@@ -384,15 +392,19 @@ public class TestFileUtil {
         zlink.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
+    if(Shell.WINDOWS) {
+      // windows Dir.setWritable(false) does not work for directories
+      return;
+    }
     LOG.info("Running test to verify failure of fullyDelete()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del));
     validateAndSetWritablePermissions(true, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
@@ -461,15 +473,19 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
+    if(Shell.WINDOWS) {
+      // windows Dir.setWritable(false) does not work for directories
+      return;
+    }
     LOG.info("Running test to verify failure of fullyDeleteContents()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
     validateAndSetWritablePermissions(true, ret);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
@@ -477,7 +493,7 @@ public class TestFileUtil {
     validateAndSetWritablePermissions(false, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testCopyMergeSingleDirectory() throws IOException {
     setupDirs();
     boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
@@ -536,7 +552,7 @@ public class TestFileUtil {
    * and that directory sizes are not added to the final calculated size
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetDU() throws IOException {
     setupDirs();
 
@@ -547,6 +563,136 @@ public class TestFileUtil {
     Assert.assertEquals(expected, du);
   }
 
+  @Test (timeout = 30000)
+  public void testSymlink() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    byte[] data = "testSymLink".getBytes();
+
+    File file = new File(del, FILE);
+    File link = new File(del, "_link");
+
+    //write some data to the file
+    FileOutputStream os = new FileOutputStream(file);
+    os.write(data);
+    os.close();
+
+    //create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    //ensure that symlink length is correctly reported by Java
+    Assert.assertEquals(data.length, file.length());
+    Assert.assertEquals(data.length, link.length());
+
+    //ensure that we can read from link.
+    FileInputStream in = new FileInputStream(link);
+    long len = 0;
+    while (in.read() > 0) {
+      len++;
+    }
+    in.close();
+    Assert.assertEquals(data.length, len);
+  }
+  
+  /**
+   * Test that rename on a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkRenameTo() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    File file = new File(del, FILE);
+    file.createNewFile();
+    File link = new File(del, "_link");
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    Assert.assertTrue(file.exists());
+    Assert.assertTrue(link.exists());
+
+    File link2 = new File(del, "_link2");
+
+    // Rename the symlink
+    Assert.assertTrue(link.renameTo(link2));
+
+    // Make sure the file still exists
+    // (NOTE: this would fail on Java6 on Windows if we didn't
+    // copy the file in FileUtil#symlink)
+    Assert.assertTrue(file.exists());
+
+    Assert.assertTrue(link2.exists());
+    Assert.assertFalse(link.exists());
+  }
+
+  /**
+   * Test that deletion of a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkDelete() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    File file = new File(del, FILE);
+    file.createNewFile();
+    File link = new File(del, "_link");
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    Assert.assertTrue(file.exists());
+    Assert.assertTrue(link.exists());
+
+    // make sure that deleting a symlink works properly
+    Assert.assertTrue(link.delete());
+    Assert.assertFalse(link.exists());
+    Assert.assertTrue(file.exists());
+  }
+
+  /**
+   * Test that length on a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkLength() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    byte[] data = "testSymLinkData".getBytes();
+
+    File file = new File(del, FILE);
+    File link = new File(del, "_link");
+
+    // write some data to the file
+    FileOutputStream os = new FileOutputStream(file);
+    os.write(data);
+    os.close();
+
+    Assert.assertEquals(0, link.length());
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    // ensure that File#length returns the target file and link size
+    Assert.assertEquals(data.length, file.length());
+    Assert.assertEquals(data.length, link.length());
+
+    file.delete();
+    Assert.assertFalse(file.exists());
+
+    if (Shell.WINDOWS && !Shell.isJava7OrAbove()) {
+      // On Java6 on Windows, we copied the file
+      Assert.assertEquals(data.length, link.length());
+    } else {
+      // Otherwise, the target file size is zero
+      Assert.assertEquals(0, link.length());
+    }
+
+    link.delete();
+    Assert.assertFalse(link.exists());
+  }
+
   private void doUntarAndVerify(File tarFile, File untarDir) 
                                  throws IOException {
     if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
@@ -574,7 +720,7 @@ public class TestFileUtil {
     Assert.assertTrue(testFile.length() == 8);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUntar() throws IOException {
     String tarGzFileName = System.getProperty("test.cache.data",
         "build/test/cache") + "/test-untar.tgz";
@@ -586,4 +732,69 @@ public class TestFileUtil {
     doUntarAndVerify(new File(tarGzFileName), untarDir);
     doUntarAndVerify(new File(tarFileName), untarDir);
   }
+
+  @Test (timeout = 30000)
+  public void testCreateJarWithClassPath() throws Exception {
+    // setup test directory for files
+    Assert.assertFalse(tmp.exists());
+    Assert.assertTrue(tmp.mkdirs());
+
+    // create files expected to match a wildcard
+    List<File> wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"),
+      new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"),
+      new File(tmp, "wildcard4.JAR"));
+    for (File wildcardMatch: wildcardMatches) {
+      Assert.assertTrue("failure creating file: " + wildcardMatch,
+        wildcardMatch.createNewFile());
+    }
+
+    // create non-jar files, which we expect to not be included in the classpath
+    Assert.assertTrue(new File(tmp, "text.txt").createNewFile());
+    Assert.assertTrue(new File(tmp, "executable.exe").createNewFile());
+    Assert.assertTrue(new File(tmp, "README").createNewFile());
+
+    // create classpath jar
+    String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
+    List<String> classPaths = Arrays.asList("cp1.jar", "cp2.jar", wildcardPath,
+      "cp3.jar");
+    String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
+    String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
+      new Path(tmp.getCanonicalPath()));
+
+    // verify classpath by reading manifest from jar file
+    JarFile jarFile = null;
+    try {
+      jarFile = new JarFile(classPathJar);
+      Manifest jarManifest = jarFile.getManifest();
+      Assert.assertNotNull(jarManifest);
+      Attributes mainAttributes = jarManifest.getMainAttributes();
+      Assert.assertNotNull(mainAttributes);
+      Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH));
+      String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH);
+      Assert.assertNotNull(classPathAttr);
+      List<String> expectedClassPaths = new ArrayList<String>();
+      for (String classPath: classPaths) {
+        if (!wildcardPath.equals(classPath)) {
+          expectedClassPaths.add(new File(classPath).toURI().toURL()
+            .toExternalForm());
+        } else {
+          // add wildcard matches
+          for (File wildcardMatch: wildcardMatches) {
+            expectedClassPaths.add(wildcardMatch.toURI().toURL()
+              .toExternalForm());
+          }
+        }
+      }
+      List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
+      Assert.assertEquals(expectedClassPaths, actualClassPaths);
+    } finally {
+      if (jarFile != null) {
+        try {
+          jarFile.close();
+        } catch (IOException e) {
+          LOG.warn("exception closing jarFile: " + classPathJar, e);
+        }
+      }
+    }
+  }
 }
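
FileUtil.createJarWithClassPath, exercised by the new test above, packages a long or wildcard-bearing classpath into the Class-Path attribute of a small manifest jar, so the real classpath can be passed as a single -cp element; this keeps java command lines short, which matters most on Windows where the command-line length limit is far lower than on Linux. A minimal usage sketch (the classpath entries and working directory are made-up examples; wildcard entries like lib/* are expanded to the jars they match, as the test above verifies):

  import java.io.File;
  import org.apache.hadoop.fs.FileUtil;
  import org.apache.hadoop.fs.Path;

  public class ClassPathJarExample {
    public static void main(String[] args) throws Exception {
      // Illustrative classpath built with the platform path separator.
      String inputClassPath = "lib/first.jar" + File.pathSeparator + "conf";
      Path workingDir = new Path(System.getProperty("java.io.tmpdir"));

      // The returned path points at a small jar whose manifest Class-Path holds
      // the entries above.
      String classPathJar = FileUtil.createJarWithClassPath(inputClassPath, workingDir);
      System.out.println(classPathJar);
    }
  }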

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java Wed Mar  6 19:15:18 2013
@@ -121,20 +121,22 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChmod/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChmod/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChmod/nonExistingfiles*";
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChmod/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChmod/nonExistingfiles*")
+      .toUri().getPath();
+
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChmod/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChmod/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChmod/file3");
 
-    Path p1 = new Path(f1);
-
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file3");
-
-    final String f7 = TEST_ROOT_DIR + "/" + "testChmod/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChmod/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -175,20 +177,23 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChown() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChown/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChown/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChown/nonExistingfiles*";
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChown/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChown/nonExistingfiles*")
+      .toUri().getPath();
 
-    Path p1 = new Path(f1);
 
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChown/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChown/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChown/file3");
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChown/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChown/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChown/file3");
 
-    final String f7 = TEST_ROOT_DIR + "/" + "testChown/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChown/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -228,20 +233,22 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChgrp() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChgrp/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChgrp/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChgrp/nonExistingfiles*";
-
-    Path p1 = new Path(f1);
-
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file3");
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChgrp/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChgrp/nonExistingfiles*")
+      .toUri().getPath();
+
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChgrp/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChgrp/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChgrp/file3");
 
-    final String f7 = TEST_ROOT_DIR + "/" + "testChgrp/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChgrp/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -271,7 +278,7 @@ public class TestFsShellReturnCode {
     change(1, null, "admin", f2, f7);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
       throws Exception {
     Configuration conf = new Configuration();
@@ -288,8 +295,8 @@ public class TestFsShellReturnCode {
       fileSys.mkdirs(tdir);
       String[] args = new String[3];
       args[0] = "-get";
-      args[1] = tdir+"/invalidSrc";
-      args[2] = tdir+"/invalidDst";
+      args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString();
+      args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString();
       assertTrue("file exists", !fileSys.exists(new Path(args[1])));
       assertTrue("file exists", !fileSys.exists(new Path(args[2])));
       int run = shell.run(args);
@@ -303,7 +310,7 @@ public class TestFsShellReturnCode {
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRmWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -324,7 +331,7 @@ public class TestFsShellReturnCode {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRmForceWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -343,7 +350,7 @@ public class TestFsShellReturnCode {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testInvalidDefaultFS() throws Exception {
     // if default fs doesn't exist or is invalid, but the path provided in 
     // arguments is valid - fsshell should work
@@ -374,7 +381,7 @@ public class TestFsShellReturnCode {
     
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testInterrupt() throws Exception {
     MyFsShell shell = new MyFsShell();
     shell.setConf(new Configuration());
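
The path handling changes in these FsShell tests follow the same pattern as the helpers above: test paths are built with the Path(parent, child) constructor and turned into strings via toUri().getPath() rather than by concatenating TEST_ROOT_DIR with "/", presumably so that drive-letter roots and backslashes on Windows do not leak into the shell arguments. A tiny illustration (the root value is an example, not what the test uses):

  import org.apache.hadoop.fs.Path;

  public class PortableTestPath {
    public static void main(String[] args) {
      String testRoot = "C:/test/build/data";   // example Windows-style root

      // String concatenation keeps whatever form the root happens to have...
      System.out.println(testRoot + "/" + "testChmod/fileExists");

      // ...while Path plus toUri().getPath() yields a URI-style path; on Windows
      // this prints /C:/test/build/data/testChmod/fileExists.
      Path p = new Path(testRoot, "testChmod/fileExists");
      System.out.println(p.toUri().getPath());
    }
  }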

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java Wed Mar  6 19:15:18 2013
@@ -54,17 +54,6 @@ import static org.apache.hadoop.fs.HardL
  * NOTICE: This test class only tests the functionality of the OS
  * upon which the test is run! (although you're pretty safe with the
  * unix-like OS's, unless a typo sneaks in.)
- * 
- * Notes about Windows testing:  
- * (a) In order to create hardlinks, the process must be run with 
- * administrative privs, in both the account AND the invocation.
- * For instance, to run within Eclipse, the Eclipse application must be 
- * launched by right-clicking on it, and selecting "Run as Administrator" 
- * (and that option will only be available if the current user id does 
- * in fact have admin privs).
- * (b) The getLinkCount() test case will fail for Windows, unless Cygwin
- * is set up properly.  In particular, ${cygwin}/bin must be in
- * the PATH environment variable, so the cygwin utilities can be found.
  */
 public class TestHardLink {
   
@@ -221,9 +210,6 @@ public class TestHardLink {
    * Sanity check the simplest case of HardLink.getLinkCount()
    * to make sure we get back "1" for ordinary single-linked files.
    * Tests with multiply-linked files are in later test cases.
-   * 
-   * If this fails on Windows but passes on Unix, the most likely cause is 
-   * incorrect configuration of the Cygwin installation; see above.
    */
   @Test
   public void testGetLinkCount() throws IOException {
@@ -412,7 +398,7 @@ public class TestHardLink {
     assertEquals(5, win.hardLinkCommand.length); 
     assertEquals(7, win.hardLinkMultPrefix.length);
     assertEquals(8, win.hardLinkMultSuffix.length);
-    assertEquals(3, win.getLinkCountCommand.length);
+    assertEquals(4, win.getLinkCountCommand.length);
 
     assertTrue(win.hardLinkMultPrefix[4].equals("%f"));
     //make sure "%f" was not munged
@@ -423,7 +409,7 @@ public class TestHardLink {
     assertTrue(win.hardLinkMultSuffix[7].equals("1>NUL"));
     //make sure "1>NUL" was not munged
     assertEquals(5, ("1>NUL").length()); 
-    assertTrue(win.getLinkCountCommand[1].equals("-c%h"));
+    assertTrue(win.getLinkCountCommand[1].equals("hardlink"));
     //make sure "-c%h" was not munged
     assertEquals(4, ("-c%h").length()); 
   }

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Wed Mar  6 19:15:18 2013
@@ -129,7 +129,7 @@ public class TestLocalDirAllocator {
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void test0() throws Exception {
     if (isWindows) return;
     String dir0 = buildBufferDir(ROOT, 0);
@@ -141,7 +141,8 @@ public class TestLocalDirAllocator {
       validateTempDirCreation(dir1);
       validateTempDirCreation(dir1);
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -150,7 +151,7 @@ public class TestLocalDirAllocator {
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
     if (isWindows) return;
     String dir1 = buildBufferDir(ROOT, 1);
@@ -162,14 +163,15 @@ public class TestLocalDirAllocator {
       validateTempDirCreation(dir2);
       validateTempDirCreation(dir2);
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
   /** Two buffer dirs. Both do not exist but on a RW disk.
    * Check if tmp dirs are allocated in a round-robin
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
     if (isWindows) return;
     String dir2 = buildBufferDir(ROOT, 2);
@@ -195,7 +197,7 @@ public class TestLocalDirAllocator {
    * Later disk1 becomes read-only.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
     if (isWindows) return;
     String dir3 = buildBufferDir(ROOT, 3);
@@ -233,7 +235,7 @@ public class TestLocalDirAllocator {
    * @throws Exception
    */
   static final int TRIALS = 100;
-  @Test
+  @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
     if (isWindows) return;
     String dir5 = buildBufferDir(ROOT, 5);
@@ -270,7 +272,7 @@ public class TestLocalDirAllocator {
    * directory. With checkAccess true, the directory should not be created.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testLocalPathForWriteDirCreation() throws IOException {
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
@@ -291,7 +293,8 @@ public class TestLocalDirAllocator {
         assertEquals(e.getClass(), FileNotFoundException.class);
       }
     } finally {
-      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -300,7 +303,7 @@ public class TestLocalDirAllocator {
    * Test when mapred.local.dir not configured and called
    * getLocalPathForWrite
    */
-  @Test
+  @Test (timeout = 30000)
   public void testShouldNotthrowNPE() throws Exception {
     Configuration conf1 = new Configuration();
     try {
@@ -319,7 +322,7 @@ public class TestLocalDirAllocator {
    * are mistakenly created from fully qualified path strings.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -330,7 +333,8 @@ public class TestLocalDirAllocator {
       assertTrue(result.getParentFile().delete());
       assertFalse(new File(dir).exists());
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -340,7 +344,7 @@ public class TestLocalDirAllocator {
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -353,7 +357,8 @@ public class TestLocalDirAllocator {
       assertEquals(f1.getName(), p1.getName());
       assertEquals("file", p1.getFileSystem(conf).getUri().getScheme());
     } finally {
-      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -364,7 +369,7 @@ public class TestLocalDirAllocator {
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
     assumeTrue(!isWindows);
     
@@ -412,7 +417,7 @@ public class TestLocalDirAllocator {
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRemoveContext() throws IOException {
     String dir = buildBufferDir(ROOT, 0);
     try {

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Wed Mar  6 19:15:18 2013
@@ -17,6 +17,7 @@
  */
 
 package org.apache.hadoop.fs;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.net.URI;
@@ -25,10 +26,14 @@ import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.AvroTestUtil;
+import org.apache.hadoop.util.Shell;
 
 import junit.framework.TestCase;
 
+import static org.junit.Assert.fail;
+
 public class TestPath extends TestCase {
+  @Test (timeout = 30000)
   public void testToString() {
     toStringTest("/");
     toStringTest("/foo");
@@ -61,6 +66,7 @@ public class TestPath extends TestCase {
     assertEquals(pathString, new Path(pathString).toString());
   }
 
+  @Test (timeout = 30000)
   public void testNormalize() throws URISyntaxException {
     assertEquals("", new Path(".").toString());
     assertEquals("..", new Path("..").toString());
@@ -82,6 +88,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testIsAbsolute() {
     assertTrue(new Path("/").isAbsolute());
     assertTrue(new Path("/foo").isAbsolute());
@@ -94,6 +101,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testParent() {
     assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
     assertEquals(new Path("foo"), new Path("foo/bar").getParent());
@@ -104,6 +112,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testChild() {
     assertEquals(new Path("."), new Path(".", "."));
     assertEquals(new Path("/"), new Path("/", "."));
@@ -123,10 +132,12 @@ public class TestPath extends TestCase {
     }
   }
   
+  @Test (timeout = 30000)
   public void testEquals() {
     assertFalse(new Path("/").equals(new Path("/foo")));
   }
 
+  @Test (timeout = 30000)
   public void testDots() {
     // Test Path(String) 
     assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz");
@@ -164,18 +175,54 @@ public class TestPath extends TestCase {
     assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
   }
 
+  /** Test that Windows paths are correctly handled */
+  @Test (timeout = 5000)
+  public void testWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("/c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("file://c:/foo/bar").toString(), "file://c:/foo/bar");
+  }
+
+  /** Test invalid paths on Windows are correctly rejected */
+  @Test (timeout = 5000)
+  public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    String [] invalidPaths = {
+        "hdfs:\\\\\\tmp"
+    };
+
+    for (String path : invalidPaths) {
+      try {
+        Path item = new Path(path);
+        fail("Did not throw for invalid path " + path);
+      } catch (IllegalArgumentException iae) {
+      }
+    }
+  }
+
   /** Test Path objects created from other Path objects */
+  @Test (timeout = 30000)
   public void testChildParentResolution() throws URISyntaxException, IOException {
     Path parent = new Path("foo1://bar1/baz1");
     Path child  = new Path("foo2://bar2/baz2");
     assertEquals(child, new Path(parent, child));
   }
   
+  @Test (timeout = 30000)
   public void testScheme() throws java.io.IOException {
     assertEquals("foo:/bar", new Path("foo:/","/bar").toString()); 
     assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString()); 
   }
 
+  @Test (timeout = 30000)
   public void testURI() throws URISyntaxException, IOException {
     URI uri = new URI("file:///bar#baz");
     Path path = new Path(uri);
@@ -198,6 +245,7 @@ public class TestPath extends TestCase {
   }
 
   /** Test URIs created from Path objects */
+  @Test (timeout = 30000)
   public void testPathToUriConversion() throws URISyntaxException, IOException {
     // Path differs from URI in that it ignores the query part..
     assertEquals(new URI(null, null, "/foo?bar", null, null),  new Path("/foo?bar").toUri());
@@ -218,6 +266,7 @@ public class TestPath extends TestCase {
   }
 
   /** Test reserved characters in URIs (and therefore Paths) */
+  @Test (timeout = 30000)
   public void testReservedCharacters() throws URISyntaxException, IOException {
     // URI encodes the path
     assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
@@ -239,6 +288,7 @@ public class TestPath extends TestCase {
     assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
   }
   
+  @Test (timeout = 30000)
   public void testMakeQualified() throws URISyntaxException {
     URI defaultUri = new URI("hdfs://host1/dir1");
     URI wd         = new URI("hdfs://host2/dir2");
@@ -252,6 +302,7 @@ public class TestPath extends TestCase {
                  new Path("file").makeQualified(defaultUri, new Path(wd)));
  }
 
+  @Test (timeout = 30000)
   public void testGetName() {
     assertEquals("", new Path("/").getName());
     assertEquals("foo", new Path("foo").getName());
@@ -261,13 +312,17 @@ public class TestPath extends TestCase {
     assertEquals("bar", new Path("hdfs://host/foo/bar").getName());
   }
   
+  @Test (timeout = 30000)
   public void testAvroReflect() throws Exception {
     AvroTestUtil.testReflect
       (new Path("foo"),
        "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
   }
 
+  @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
+    // This test is not meaningful on Windows, where * is disallowed in file names.
+    if (Shell.WINDOWS) return;
     FileSystem lfs = FileSystem.getLocal(new Configuration());
     Path testRoot = lfs.makeQualified(new Path(
         System.getProperty("test.build.data","test/build/data"),
@@ -324,4 +379,31 @@ public class TestPath extends TestCase {
     assertEquals(1, stats.length);
     assertEquals(new Path(testRoot, "*/f"), stats[0].getPath());
   }
+
+  @Test (timeout = 30000)
+  public void testMergePaths() {
+    assertEquals(new Path("/foo/bar"),
+      Path.mergePaths(new Path("/foo"),
+        new Path("/bar")));
+
+    assertEquals(new Path("/foo/bar/baz"),
+      Path.mergePaths(new Path("/foo/bar"),
+        new Path("/baz")));
+
+    assertEquals(new Path("/foo/bar/baz"),
+      Path.mergePaths(new Path("/foo"),
+        new Path("/bar/baz")));
+
+    assertEquals(new Path(Shell.WINDOWS ? "/C:/foo/bar" : "/C:/foo/C:/bar"),
+      Path.mergePaths(new Path("/C:/foo"),
+        new Path("/C:/bar")));
+
+    assertEquals(new Path("viewfs:///foo/bar"),
+      Path.mergePaths(new Path("viewfs:///foo"),
+        new Path("file:///bar")));
+
+    assertEquals(new Path("viewfs://vfsauthority/foo/bar"),
+      Path.mergePaths(new Path("viewfs://vfsauthority/foo"),
+        new Path("file://fileauthority/bar")));
+  }
 }

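For readers following the Windows path handling above: testWindowsPaths pins down how
Path normalizes Windows-style input when Path.WINDOWS is true (backslashes become
forward slashes, and a leading slash before the drive letter is dropped). A minimal
sketch of that behavior, assuming a Windows JVM as the test does; the class name is
illustrative only:

import org.apache.hadoop.fs.Path;

public class WindowsPathSketch {
  public static void main(String[] args) {
    // Per the assertions in testWindowsPaths, all three spellings normalize
    // to the same forward-slash form on Windows.
    System.out.println(new Path("c:\\foo\\bar"));  // c:/foo/bar
    System.out.println(new Path("c:/foo/bar"));    // c:/foo/bar
    System.out.println(new Path("/c:/foo/bar"));   // c:/foo/bar
  }
}
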
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Wed Mar  6 19:15:18 2013
@@ -55,7 +55,7 @@ public class TestTrash extends TestCase 
   // check that the specified file is in Trash
   protected static void checkTrash(FileSystem trashFs, Path trashRoot,
       Path path) throws IOException {
-    Path p = new Path(trashRoot+"/"+ path.toUri().getPath());
+    Path p = Path.mergePaths(trashRoot, path);
     assertTrue("Could not find file in trash: "+ p , trashFs.exists(p));
   }
   
@@ -399,7 +399,8 @@ public class TestTrash extends TestCase 
         assertTrue(val==0);
       }
       // current trash directory
-      Path trashDir = new Path(trashRoot.toUri().getPath() + myFile.getParent().toUri().getPath());
+      Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()),
+        new Path(myFile.getParent().toUri().getPath()));
       
       System.out.println("Deleting same myFile: myFile.parent=" + myFile.getParent().toUri().getPath() + 
           "; trashroot="+trashRoot.toUri().getPath() + 

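The checkTrash change above replaces string concatenation with Path.mergePaths, whose
semantics are the ones asserted by testMergePaths in TestPath.java: the result keeps the
scheme and authority of the first argument and appends only the path component of the
second. A small sketch of building a trash location that way; the class name and example
paths are made up for illustration:

import org.apache.hadoop.fs.Path;

public class TrashPathSketch {
  public static void main(String[] args) {
    Path trashRoot = new Path("hdfs://nn1/user/alice/.Trash/Current");
    Path deleted   = new Path("/data/logs/part-00000");

    // Scheme and authority come from trashRoot; only the path component
    // of 'deleted' is appended.
    Path inTrash = Path.mergePaths(trashRoot, deleted);

    // Expected, per the testMergePaths assertions:
    // hdfs://nn1/user/alice/.Trash/Current/data/logs/part-00000
    System.out.println(inTrash);
  }
}
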
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Wed Mar  6 19:15:18 2013
@@ -19,8 +19,10 @@ package org.apache.hadoop.fs.shell;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
@@ -59,7 +61,7 @@ public class TestPathData {
     fs.close();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithDirStringAndConf() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -72,7 +74,7 @@ public class TestPathData {
     checkPathData(dirString, item);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnqualifiedUriContents() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -83,7 +85,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testQualifiedUriContents() throws Exception {
     String dirString = fs.makeQualified(new Path("d1")).toString();
     PathData item = new PathData(dirString, conf);
@@ -94,7 +96,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCwdContents() throws Exception {
     String dirString = Path.CUR_DIR;
     PathData item = new PathData(dirString, conf);
@@ -105,7 +107,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testToFile() throws Exception {
     PathData item = new PathData(".", conf);
     assertEquals(new File(testDir.toString()), item.toFile());
@@ -115,7 +117,56 @@ public class TestPathData {
     assertEquals(new File(testDir + "/d1/f1"), item.toFile());
   }
 
-  @Test
+  @Test (timeout = 5000)
+  public void testToFileRawWindowsPaths() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    // Can we handle raw Windows paths? The files need not exist for
+    // these tests to succeed.
+    String[] winPaths = {
+        "n:\\",
+        "N:\\",
+        "N:\\foo",
+        "N:\\foo\\bar",
+        "N:/",
+        "N:/foo",
+        "N:/foo/bar"
+    };
+
+    PathData item;
+
+    for (String path : winPaths) {
+      item = new PathData(path, conf);
+      assertEquals(new File(path), item.toFile());
+    }
+
+    item = new PathData("foo\\bar", conf);
+    assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
+  }
+
+  @Test (timeout = 5000)
+  public void testInvalidWindowsPath() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    // Verify that the following invalid paths are rejected.
+    String [] winPaths = {
+        "N:\\foo/bar"
+    };
+
+    for (String path : winPaths) {
+      try {
+        PathData item = new PathData(path, conf);
+        fail("Did not throw for invalid path " + path);
+      } catch (IOException ioe) {
+      }
+    }
+  }
+
+  @Test (timeout = 30000)
   public void testAbsoluteGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
     assertEquals(
@@ -124,7 +175,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
     assertEquals(
@@ -133,7 +184,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlobBack() throws Exception {
     fs.setWorkingDirectory(new Path("d1"));
     PathData[] items = PathData.expandAsGlob("../d2/*", conf);
@@ -143,7 +194,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithStringAndConfForBuggyPath() throws Exception {
     String dirString = "file:///tmp";
     Path tmpDir = new Path(dirString);

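The glob tests above exercise PathData.expandAsGlob, used by the FsShell commands to
expand a user-supplied pattern into concrete PathData items. A short sketch of that call
under the same kind of setup as the tests; the pattern and class name are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.shell.PathData;

public class GlobSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Expand a shell-style pattern into zero or more PathData items,
    // resolved against the default FileSystem's working directory.
    PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
    for (PathData item : items) {
      // The tests above compare these via toString().
      System.out.println(item);
    }
  }
}
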
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java Wed Mar  6 19:15:18 2013
@@ -26,9 +26,11 @@ import java.io.InputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
+import java.net.URI;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 
 /**
@@ -38,12 +40,13 @@ import org.junit.Test;
 public class TestTextCommand {
   private static final String TEST_ROOT_DIR =
     System.getProperty("test.build.data", "build/test/data/") + "/testText";
-  private static final String AVRO_FILENAME = TEST_ROOT_DIR + "/weather.avro";
+  private static final String AVRO_FILENAME =
+    new Path(TEST_ROOT_DIR, "weather.avro").toUri().getPath();
 
   /**
    * Tests whether binary Avro data files are displayed correctly.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDisplayForAvroFiles() throws Exception {
     // Create a small Avro data file on the local file system.
     createAvroFile(generateWeatherAvroBinaryData());
@@ -51,7 +54,7 @@ public class TestTextCommand {
     // Prepare and call the Text command's protected getInputStream method
     // using reflection.
     Configuration conf = new Configuration();
-    File localPath = new File(AVRO_FILENAME);
+    URI localPath = new URI(AVRO_FILENAME);
     PathData pathData = new PathData(localPath, conf);
     Display.Text text = new Display.Text();
     text.setConf(conf);

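The TestTextCommand change above constructs the PathData from a URI instead of a
java.io.File, keeping the test path in platform-neutral form. A minimal sketch of the
same pattern; the file name and class name are illustrative:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.shell.PathData;

public class PathDataFromUriSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Normalize the local path through Path first, then hand the resulting
    // URI to PathData, the same sequence the modified test uses.
    String avroFile = new Path("build/test/data/testText", "weather.avro")
        .toUri().getPath();
    PathData pathData = new PathData(new URI(avroFile), conf);
    System.out.println(pathData);
  }
}
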
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Wed Mar  6 19:15:18 2013
@@ -21,6 +21,8 @@ import java.io.File;
 import java.io.FileDescriptor;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.ArrayList;
@@ -60,11 +62,15 @@ public class TestNativeIO {
     TEST_DIR.mkdirs();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFstat() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
-    NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+    NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
     fos.close();
     LOG.info("Stat: " + String.valueOf(stat));
 
@@ -72,7 +78,8 @@ public class TestNativeIO {
     assertNotNull(stat.getGroup());
     assertTrue(!stat.getGroup().isEmpty());
     assertEquals("Stat mode field should indicate a regular file",
-      NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+      NativeIO.POSIX.Stat.S_IFREG,
+      stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
   }
 
   /**
@@ -81,8 +88,12 @@ public class TestNativeIO {
    * NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe
    * implementation of getpwuid_r.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testMultiThreadedFstat() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     final FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat"));
 
@@ -96,12 +107,13 @@ public class TestNativeIO {
           long et = Time.now() + 5000;
           while (Time.now() < et) {
             try {
-              NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+              NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
               assertEquals(System.getProperty("user.name"), stat.getOwner());
               assertNotNull(stat.getGroup());
               assertTrue(!stat.getGroup().isEmpty());
               assertEquals("Stat mode field should indicate a regular file",
-                NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+                NativeIO.POSIX.Stat.S_IFREG,
+                stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
             } catch (Throwable t) {
               thrown.set(t);
             }
@@ -122,26 +134,123 @@ public class TestNativeIO {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFstatClosedFd() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testfstat2"));
     fos.close();
     try {
-      NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+      NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
       assertEquals(Errno.EBADF, nioe.getErrno());
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
+  public void testSetFilePointer() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    LOG.info("Set a file pointer on Windows");
+    try {
+      File testfile = new File(TEST_DIR, "testSetFilePointer");
+      assertTrue("Create test subject",
+          testfile.exists() || testfile.createNewFile());
+      FileWriter writer = new FileWriter(testfile);
+      try {
+        for (int i = 0; i < 200; i++)
+          if (i < 100)
+            writer.write('a');
+          else
+            writer.write('b');
+        writer.flush();
+      } catch (Exception writerException) {
+        fail("Got unexpected exception: " + writerException.getMessage());
+      } finally {
+        writer.close();
+      }
+
+      FileDescriptor fd = NativeIO.Windows.createFile(
+          testfile.getCanonicalPath(),
+          NativeIO.Windows.GENERIC_READ,
+          NativeIO.Windows.FILE_SHARE_READ |
+          NativeIO.Windows.FILE_SHARE_WRITE |
+          NativeIO.Windows.FILE_SHARE_DELETE,
+          NativeIO.Windows.OPEN_EXISTING);
+      NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
+      FileReader reader = new FileReader(fd);
+      try {
+        int c = reader.read();
+        assertTrue("Unexpected character: " + c, c == 'b');
+      } catch (Exception readerException) {
+        fail("Got unexpected exception: " + readerException.getMessage());
+      } finally {
+        reader.close();
+      }
+    } catch (Exception e) {
+      fail("Got unexpected exception: " + e.getMessage());
+    }
+  }
+
+  @Test (timeout = 30000)
+  public void testCreateFile() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
+    try {
+      File testfile = new File(TEST_DIR, "testCreateFile");
+      assertTrue("Create test subject",
+          testfile.exists() || testfile.createNewFile());
+
+      FileDescriptor fd = NativeIO.Windows.createFile(
+          testfile.getCanonicalPath(),
+          NativeIO.Windows.GENERIC_READ,
+          NativeIO.Windows.FILE_SHARE_READ |
+          NativeIO.Windows.FILE_SHARE_WRITE |
+          NativeIO.Windows.FILE_SHARE_DELETE,
+          NativeIO.Windows.OPEN_EXISTING);
+
+      FileInputStream fin = new FileInputStream(fd);
+      try {
+        fin.read();
+
+        File newfile = new File(TEST_DIR, "testRenamedFile");
+
+        boolean renamed = testfile.renameTo(newfile);
+        assertTrue("Rename failed.", renamed);
+
+        fin.read();
+      } catch (Exception e) {
+        fail("Got unexpected exception: " + e.getMessage());
+      }
+      finally {
+        fin.close();
+      }
+    } catch (Exception e) {
+      fail("Got unexpected exception: " + e.getMessage());
+    }
+
+  }
+
+  @Test (timeout = 30000)
   public void testOpenMissingWithoutCreate() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
-      FileDescriptor fd = NativeIO.open(
+      FileDescriptor fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "doesntexist").getAbsolutePath(),
-        NativeIO.O_WRONLY, 0700);
+        NativeIO.POSIX.O_WRONLY, 0700);
       fail("Able to open a new file without O_CREAT");
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception", nioe);
@@ -149,12 +258,16 @@ public class TestNativeIO {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testOpenWithCreate() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     LOG.info("Test creating a file with O_CREAT");
-    FileDescriptor fd = NativeIO.open(
+    FileDescriptor fd = NativeIO.POSIX.open(
       new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-      NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+      NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
     assertNotNull(true);
     assertTrue(fd.valid());
     FileOutputStream fos = new FileOutputStream(fd);
@@ -165,9 +278,9 @@ public class TestNativeIO {
 
     LOG.info("Test exclusive create");
     try {
-      fd = NativeIO.open(
+      fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
-        NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
+        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT | NativeIO.POSIX.O_EXCL, 0700);
       fail("Was able to create existing file with O_EXCL");
     } catch (NativeIOException nioe) {
       LOG.info("Got expected exception for failed exclusive create", nioe);
@@ -179,12 +292,16 @@ public class TestNativeIO {
    * Test that opens and closes a file 10000 times - this would crash with
    * "Too many open files" if we leaked fds using this access pattern.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFDDoesntLeak() throws IOException {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     for (int i = 0; i < 10000; i++) {
-      FileDescriptor fd = NativeIO.open(
+      FileDescriptor fd = NativeIO.POSIX.open(
         new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
-        NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
       assertNotNull(true);
       assertTrue(fd.valid());
       FileOutputStream fos = new FileOutputStream(fd);
@@ -196,10 +313,14 @@ public class TestNativeIO {
   /**
    * Test basic chmod operation
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     try {
-      NativeIO.chmod("/this/file/doesnt/exist", 777);
+      NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
       fail("Chmod of non-existent file didn't fail");
     } catch (NativeIOException nioe) {
       assertEquals(Errno.ENOENT, nioe.getErrno());
@@ -208,21 +329,26 @@ public class TestNativeIO {
     File toChmod = new File(TEST_DIR, "testChmod");
     assertTrue("Create test subject",
                toChmod.exists() || toChmod.mkdir());
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0777);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
     assertPermissions(toChmod, 0777);
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0000);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
     assertPermissions(toChmod, 0000);
-    NativeIO.chmod(toChmod.getAbsolutePath(), 0644);
+    NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0644);
     assertPermissions(toChmod, 0644);
   }
 
 
-  @Test
+  @Test (timeout = 30000)
   public void testPosixFadvise() throws Exception {
+    if (Path.WINDOWS) {
+      return;
+    }
+
     FileInputStream fis = new FileInputStream("/dev/zero");
     try {
-      NativeIO.posix_fadvise(fis.getFD(), 0, 0,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          fis.getFD(), 0, 0,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
     } catch (UnsupportedOperationException uoe) {
       // we should just skip the unit test on machines where we don't
       // have fadvise support
@@ -235,8 +361,9 @@ public class TestNativeIO {
     }
 
     try {
-      NativeIO.posix_fadvise(fis.getFD(), 0, 1024,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          fis.getFD(), 0, 1024,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
 
       fail("Did not throw on bad file");
     } catch (NativeIOException nioe) {
@@ -244,8 +371,9 @@ public class TestNativeIO {
     }
     
     try {
-      NativeIO.posix_fadvise(null, 0, 1024,
-                             NativeIO.POSIX_FADV_SEQUENTIAL);
+      NativeIO.POSIX.posix_fadvise(
+          null, 0, 1024,
+          NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
 
       fail("Did not throw on null file");
     } catch (NullPointerException npe) {
@@ -253,14 +381,15 @@ public class TestNativeIO {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testSyncFileRange() throws Exception {
     FileOutputStream fos = new FileOutputStream(
       new File(TEST_DIR, "testSyncFileRange"));
     try {
       fos.write("foo".getBytes());
-      NativeIO.sync_file_range(fos.getFD(), 0, 1024,
-                               NativeIO.SYNC_FILE_RANGE_WRITE);
+      NativeIO.POSIX.sync_file_range(
+          fos.getFD(), 0, 1024,
+          NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
       // no way to verify that this actually has synced,
       // but if it doesn't throw, we can assume it worked
     } catch (UnsupportedOperationException uoe) {
@@ -271,8 +400,9 @@ public class TestNativeIO {
       fos.close();
     }
     try {
-      NativeIO.sync_file_range(fos.getFD(), 0, 1024,
-                               NativeIO.SYNC_FILE_RANGE_WRITE);
+      NativeIO.POSIX.sync_file_range(
+          fos.getFD(), 0, 1024,
+          NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
       fail("Did not throw on bad file");
     } catch (NativeIOException nioe) {
       assertEquals(Errno.EBADF, nioe.getErrno());
@@ -286,17 +416,25 @@ public class TestNativeIO {
     assertEquals(expected, perms.toShort());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testGetUserName() throws IOException {
-    assertFalse(NativeIO.getUserName(0).isEmpty());
+    if (Path.WINDOWS) {
+      return;
+    }
+
+    assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testGetGroupName() throws IOException {
-    assertFalse(NativeIO.getGroupName(0).isEmpty());
+    if (Path.WINDOWS) {
+      return;
+    }
+
+    assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRenameTo() throws Exception {
     final File TEST_DIR = new File(new File(
         System.getProperty("test.build.data","build/test/data")), "renameTest");