Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2013/08/23 22:57:02 UTC

svn commit: r1517030 - in /hadoop/common/branches/HDFS-2832/hadoop-common-project: hadoop-common/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apache/hadoop/fs/ hadoop-common/src/main/java/org/apache/hadoop/fs/shell/ hadoop-common/src/t...

Author: arp
Date: Fri Aug 23 20:57:00 2013
New Revision: 1517030

URL: http://svn.apache.org/r1517030
Log:
Merge all changes from trunk to branch HDFS-2832

Modified:
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java
    hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/pom.xml

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1517030&r1=1517029&r2=1517030&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt Fri Aug 23 20:57:00 2013
@@ -420,6 +420,9 @@ Release 2.1.1-beta - UNRELEASED
     HADOOP-9880. SASL changes from HADOOP-9421 breaks Secure HA NN. (daryn via
     jing9)
 
+    HADOOP-9887. globStatus does not correctly handle paths starting with a drive
+    spec on Windows. (Chuan Liu via cnauroth)
+
 Release 2.1.0-beta - 2013-08-22
 
   INCOMPATIBLE CHANGES

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1516229-1517028

Propchange: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1516229-1517028

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java?rev=1517030&r1=1517029&r2=1517030&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Globber.java Fri Aug 23 20:57:00 2013
@@ -97,7 +97,7 @@ class Globber {
   /**
    * Translate an absolute path into a list of path components.
    * We merge double slashes into a single slash here.
-   * The first path component (i.e. root) does not get an entry in the list.
+   * POSIX root path, i.e. '/', does not get an entry in the list.
    */
   private static List<String> getPathComponents(String path)
       throws IOException {
@@ -167,8 +167,8 @@ class Globber {
       // Get the absolute path for this flattened pattern.  We couldn't do 
       // this prior to flattening because of patterns like {/,a}, where which
       // path you go down influences how the path must be made absolute.
-      Path absPattern =
-          fixRelativePart(new Path(flatPattern .isEmpty() ? "." : flatPattern ));
+      Path absPattern = fixRelativePart(new Path(
+          flatPattern.isEmpty() ? Path.CUR_DIR : flatPattern));
       // Now we break the flattened, absolute pattern into path components.
       // For example, /a/*/c would be broken into the list [a, *, c]
       List<String> components =
@@ -176,9 +176,19 @@ class Globber {
       // Starting out at the root of the filesystem, we try to match
       // filesystem entries against pattern components.
       ArrayList<FileStatus> candidates = new ArrayList<FileStatus>(1);
-      candidates.add(new FileStatus(0, true, 0, 0, 0,
-          new Path(scheme, authority, "/")));
-
+      if (Path.WINDOWS && !components.isEmpty()
+          && Path.isWindowsAbsolutePath(absPattern.toUri().getPath(), true)) {
+        // On Windows the path could begin with a drive letter, e.g. /E:/foo.
+        // We will skip matching the drive letter and start from listing the
+        // root of the filesystem on that drive.
+        String driveLetter = components.remove(0);
+        candidates.add(new FileStatus(0, true, 0, 0, 0, new Path(scheme,
+            authority, Path.SEPARATOR + driveLetter + Path.SEPARATOR)));
+      } else {
+        candidates.add(new FileStatus(0, true, 0, 0, 0,
+            new Path(scheme, authority, Path.SEPARATOR)));
+      }
+      
       for (String component : components) {
         ArrayList<FileStatus> newCandidates =
             new ArrayList<FileStatus>(candidates.size());
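
The hunk above is the HADOOP-9887 fix: on Windows an absolute URI path can begin with a drive spec such as /E:/foo, and the previous code always seeded glob matching from "/", so the drive letter was treated as an ordinary pattern component. A minimal, self-contained sketch of the idea follows; the helper names are hypothetical, and the real code relies on Path.isWindowsAbsolutePath, FileStatus and Globber#getPathComponents rather than the simplified string handling shown here.

  // Standalone sketch of the drive-spec handling added to Globber above.
  // Hypothetical names; not the hadoop-common implementation.
  import java.util.ArrayList;
  import java.util.List;

  public class DriveGlobSketch {

    // Split an absolute, slash-separated path into components, dropping the
    // leading root, similar in spirit to Globber#getPathComponents.
    static List<String> splitPathComponents(String path) {
      List<String> components = new ArrayList<String>();
      for (String part : path.split("/+")) {
        if (!part.isEmpty()) {
          components.add(part);
        }
      }
      return components;
    }

    // Decide where matching starts: on Windows a leading component such as
    // "E:" is a drive spec; it is removed from the component list (it should
    // not be glob-matched) and the initial candidate becomes /E:/ rather
    // than /.
    static String initialCandidateRoot(List<String> components,
        boolean onWindows) {
      if (onWindows && !components.isEmpty()
          && components.get(0).matches("[a-zA-Z]:")) {
        String driveLetter = components.remove(0);
        return "/" + driveLetter + "/";
      }
      return "/";
    }

    public static void main(String[] args) {
      List<String> components = splitPathComponents("/E:/foo/*");
      System.out.println(initialCandidateRoot(components, true)); // /E:/
      System.out.println(components);                             // [foo, *]
    }
  }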

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1517030&r1=1517029&r2=1517030&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Fri Aug 23 20:57:00 2013
@@ -106,10 +106,12 @@ public class PathData implements Compara
 
   /**
    * Validates the given Windows path.
-   * Throws IOException on failure.
    * @param pathString a String of the path supplied by the user.
+   * @return true if the URI scheme was not present in the pathString but
+   * inferred; false, otherwise.
+   * @throws IOException if anything goes wrong
    */
-  private void ValidateWindowsPath(String pathString)
+  private static boolean checkIfSchemeInferredFromPath(String pathString)
   throws IOException
   {
     if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
@@ -118,23 +120,21 @@ public class PathData implements Compara
         throw new IOException("Invalid path string " + pathString);
       }
 
-      inferredSchemeFromPath = true;
-      return;
+      return true;
     }
 
     // Is it a forward slash-separated absolute path?
     if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
-      inferredSchemeFromPath = true;
-      return;
+      return true;
     }
 
     // Does it look like a URI? If so then just leave it alone.
     if (potentialUri.matcher(pathString).find()) {
-      return;
+      return false;
     }
 
     // Looks like a relative path on Windows.
-    return;
+    return false;
   }
 
   /**
@@ -153,7 +153,7 @@ public class PathData implements Compara
     setStat(stat);
 
     if (Path.WINDOWS) {
-      ValidateWindowsPath(pathString);
+      inferredSchemeFromPath = checkIfSchemeInferredFromPath(pathString);
     }
   }
 
@@ -302,7 +302,7 @@ public class PathData implements Compara
     // check getPath() so scheme slashes aren't considered part of the path
     String separator = uri.getPath().endsWith(Path.SEPARATOR)
         ? "" : Path.SEPARATOR;
-    return uri + separator + basename;
+    return uriToString(uri, inferredSchemeFromPath) + separator + basename;
   }
   
   protected enum PathType { HAS_SCHEME, SCHEMELESS_ABSOLUTE, RELATIVE };
@@ -356,7 +356,7 @@ public class PathData implements Compara
             if (globUri.getAuthority() == null) {
               matchUri = removeAuthority(matchUri);
             }
-            globMatch = matchUri.toString();
+            globMatch = uriToString(matchUri, false);
             break;
           case SCHEMELESS_ABSOLUTE: // take just the uri's path
             globMatch = matchUri.getPath();
@@ -438,6 +438,10 @@ public class PathData implements Compara
    */
   @Override
   public String toString() {
+    return uriToString(uri, inferredSchemeFromPath);
+  }
+ 
+  private static String uriToString(URI uri, boolean inferredSchemeFromPath) {
     String scheme = uri.getScheme();
     // No interpretation of symbols. Just decode % escaped chars.
     String decodedRemainder = uri.getSchemeSpecificPart();
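
The refactor above separates classification of Windows path strings (checkIfSchemeInferredFromPath) from the side effect of setting inferredSchemeFromPath, and routes string rendering through uriToString so an inferred scheme is handled consistently in toString(), getPathStr() and glob expansion. A rough, self-contained sketch of the classification logic follows; the regexes here are assumptions standing in for PathData's windowsNonUriAbsolutePath1, windowsNonUriAbsolutePath2 and potentialUri fields, whose exact definitions are outside this diff, and the real method can also reject invalid path strings with an IOException, which this sketch omits.

  // Illustrative sketch only; the patterns are assumptions, not PathData's.
  import java.util.regex.Pattern;

  public class SchemeInferenceSketch {
    // Backslash-separated absolute path with a drive letter, e.g. C:\dir\file
    private static final Pattern DRIVE_BACKSLASH =
        Pattern.compile("^/?[a-zA-Z]:\\\\");
    // Forward slash-separated absolute path with a drive letter, e.g. C:/dir
    private static final Pattern DRIVE_FORWARD_SLASH =
        Pattern.compile("^/?[a-zA-Z]:/");
    // Something that already carries a URI scheme, e.g. hdfs://nn:8020/dir
    private static final Pattern POTENTIAL_URI =
        Pattern.compile("^[a-zA-Z][a-zA-Z0-9+.-]*://");

    // Mirrors the shape of checkIfSchemeInferredFromPath: true when the
    // string is a bare Windows absolute path (so the filesystem scheme had
    // to be inferred), false when it already looks like a URI or is relative.
    static boolean schemeInferredFromPath(String pathString) {
      if (DRIVE_BACKSLASH.matcher(pathString).find()) {
        return true;
      }
      if (DRIVE_FORWARD_SLASH.matcher(pathString).find()) {
        return true;
      }
      if (POTENTIAL_URI.matcher(pathString).find()) {
        return false;
      }
      return false; // looks like a relative path on Windows
    }

    public static void main(String[] args) {
      System.out.println(schemeInferredFromPath("C:\\Users\\test")); // true
      System.out.println(schemeInferredFromPath("C:/Users/test"));   // true
      System.out.println(schemeInferredFromPath("hdfs://nn/dir"));   // false
      System.out.println(schemeInferredFromPath("dir/file"));        // false
    }
  }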

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java?rev=1517030&r1=1517029&r2=1517030&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/cli/util/CommandExecutor.java Fri Aug 23 20:57:00 2013
@@ -24,6 +24,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
 import java.util.StringTokenizer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.ArrayList;
 
 /**
  *
@@ -32,23 +35,31 @@ import java.util.StringTokenizer;
 public abstract class CommandExecutor {  
   protected String[] getCommandAsArgs(final String cmd, final String masterKey,
 		                                       final String master) {
-    StringTokenizer tokenizer = new StringTokenizer(cmd, " ");
-    String[] args = new String[tokenizer.countTokens()];
-    
-    int i = 0;
-    while (tokenizer.hasMoreTokens()) {
-      args[i] = tokenizer.nextToken();
-
-      args[i] = args[i].replaceAll(masterKey, master);
-      args[i] = args[i].replaceAll("CLITEST_DATA", 
-        new File(CLITestHelper.TEST_CACHE_DATA_DIR).
-        toURI().toString().replace(' ', '+'));
-      args[i] = args[i].replaceAll("USERNAME", System.getProperty("user.name"));
+    String regex = "\'([^\']*)\'|\"([^\"]*)\"|(\\S+)";
+    Matcher matcher = Pattern.compile(regex).matcher(cmd);
 
-      i++;
-    }
-    
-    return args;
+    ArrayList<String> args = new ArrayList<String>();
+    String arg = null;
+
+    while (matcher.find()) {
+      if (matcher.group(1) != null) {
+        arg = matcher.group(1);
+      } else if (matcher.group(2) != null) {
+        arg = matcher.group(2);
+      } else {
+        arg = matcher.group(3);
+      }
+
+      arg = arg.replaceAll(masterKey, master);
+      arg = arg.replaceAll("CLITEST_DATA",
+         new File(CLITestHelper.TEST_CACHE_DATA_DIR).
+         toURI().toString().replace(' ', '+'));
+      arg = arg.replaceAll("USERNAME", System.getProperty("user.name"));
+
+      args.add(arg);
+     }
+
+    return args.toArray(new String[0]);
   }
   
   public Result executeCommand(final String cmd) throws Exception {
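
The rewrite above drops StringTokenizer, which split on every space, in favor of a regex that keeps single- or double-quoted arguments intact, so CLI test commands can carry arguments containing embedded spaces. A small standalone demo of that tokenizing regex (the sample command string below is made up):

  // Demonstrates the quote-aware tokenizing regex introduced above.
  import java.util.ArrayList;
  import java.util.List;
  import java.util.regex.Matcher;
  import java.util.regex.Pattern;

  public class QuoteTokenizerDemo {
    public static void main(String[] args) {
      String cmd = "-mkdir '/dir with spaces' -touchz \"/another file\" plain";
      String regex = "\'([^\']*)\'|\"([^\"]*)\"|(\\S+)";
      Matcher matcher = Pattern.compile(regex).matcher(cmd);

      List<String> tokens = new ArrayList<String>();
      while (matcher.find()) {
        if (matcher.group(1) != null) {          // single-quoted argument
          tokens.add(matcher.group(1));
        } else if (matcher.group(2) != null) {   // double-quoted argument
          tokens.add(matcher.group(2));
        } else {                                 // bare whitespace-delimited token
          tokens.add(matcher.group(3));
        }
      }
      // Prints: [-mkdir, /dir with spaces, -touchz, /another file, plain]
      System.out.println(tokens);
    }
  }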

Modified: hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/pom.xml?rev=1517030&r1=1517029&r2=1517030&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/pom.xml (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-common-project/hadoop-nfs/pom.xml Fri Aug 23 20:57:00 2013
@@ -95,4 +95,50 @@
       <version>11.0.2</version>
     </dependency>
   </dependencies>
+
+
+  <profiles>
+    <profile>
+      <id>dist</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-assembly-plugin</artifactId>
+            <dependencies>
+              <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-assemblies</artifactId>
+                <version>${project.version}</version>
+              </dependency>
+            </dependencies>
+            <executions>
+              <execution>
+                <id>dist</id>
+                <phase>package</phase>
+                <goals>
+                  <goal>single</goal>
+                </goals>
+                <configuration>
+                  <finalName>${project.artifactId}-${project.version}</finalName>
+                  <appendAssemblyId>false</appendAssemblyId>
+                  <attach>false</attach>
+                  <!--<descriptorRefs>
+                    <descriptorRef>hadoop-nfs-dist</descriptorRef>
+                  </descriptorRefs>-->
+                  <descriptors>
+                    <descriptor>../../hadoop-assemblies/src/main/resources/assemblies/hadoop-nfs-dist.xml</descriptor>
+                  </descriptors>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+
 </project>
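
Note on the new profile: with activeByDefault set to false, the dist profile only takes effect when selected explicitly, for example by running mvn package -Pdist in the hadoop-nfs module; it then binds the assembly plugin's single goal to the package phase and builds the assembly described by hadoop-nfs-dist.xml.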