You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by su...@apache.org on 2013/03/06 20:15:22 UTC
svn commit: r1453486 [2/7] - in
/hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/bin/
src/main/conf/ src/main/docs/src/documentation/content/xdocs/
src/main/java/ src/main/java/org/apache/hadoop/fs/
src/main/java/org/apache/hadoop/...
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Path.java Wed Mar 6 19:15:18 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
+import java.util.regex.Pattern;
import org.apache.avro.reflect.Stringable;
import org.apache.commons.lang.StringUtils;
@@ -43,9 +44,17 @@ public class Path implements Comparable
public static final String CUR_DIR = ".";
- static final boolean WINDOWS
+ public static final boolean WINDOWS
= System.getProperty("os.name").startsWith("Windows");
+ /**
+ * Pre-compiled regular expressions to detect path formats.
+ */
+ private static final Pattern hasUriScheme =
+ Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
+ private static final Pattern hasDriveLetterSpecifier =
+ Pattern.compile("^/?[a-zA-Z]:");
+
private URI uri; // a hierarchical uri
/** Resolve a child path against a parent path. */
@@ -81,7 +90,7 @@ public class Path implements Comparable
resolved.getPath(), resolved.getFragment());
}
- private void checkPathArg( String path ) {
+ private void checkPathArg( String path ) throws IllegalArgumentException {
// disallow construction of a Path from an empty string
if ( path == null ) {
throw new IllegalArgumentException(
@@ -95,15 +104,16 @@ public class Path implements Comparable
/** Construct a path from a String. Path strings are URIs, but with
* unescaped elements and some additional normalization. */
- public Path(String pathString) {
+ public Path(String pathString) throws IllegalArgumentException {
checkPathArg( pathString );
// We can't use 'new URI(String)' directly, since it assumes things are
// escaped, which we don't require of Paths.
// add a slash in front of paths with Windows drive letters
- if (hasWindowsDrive(pathString, false))
- pathString = "/"+pathString;
+ if (hasWindowsDrive(pathString) && pathString.charAt(0) != '/') {
+ pathString = "/" + pathString;
+ }
// parse uri components
String scheme = null;
@@ -151,22 +161,54 @@ public class Path implements Comparable
private void initialize(String scheme, String authority, String path,
String fragment) {
try {
- this.uri = new URI(scheme, authority, normalizePath(path), null, fragment)
+ this.uri = new URI(scheme, authority, normalizePath(scheme, path), null, fragment)
.normalize();
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
- private String normalizePath(String path) {
- // remove double slashes & backslashes
+ /**
+ * Merge 2 paths such that the second path is appended relative to the first.
+ * The returned path has the scheme and authority of the first path. On
+ * Windows, the drive specification in the second path is discarded.
+ *
+ * @param path1 Path first path
+ * @param path2 Path second path, to be appended relative to path1
+ * @return Path merged path
+ */
+ public static Path mergePaths(Path path1, Path path2) {
+ String path2Str = path2.toUri().getPath();
+ if(hasWindowsDrive(path2Str)) {
+ path2Str = path2Str.substring(path2Str.indexOf(':')+1);
+ }
+ return new Path(path1 + path2Str);
+ }
+
+ /**
+ * Normalize a path string to use non-duplicated forward slashes as
+ * the path separator and remove any trailing path separators.
+ * @param scheme Supplies the URI scheme. Used to deduce whether we
+ * should replace backslashes or not.
+ * @param path Supplies the scheme-specific part
+ * @return Normalized path string.
+ */
+ private static String normalizePath(String scheme, String path) {
+ // Remove double forward slashes.
path = StringUtils.replace(path, "//", "/");
- if (Path.WINDOWS) {
+
+ // Remove backslashes if this looks like a Windows path. Avoid
+ // the substitution if it looks like a non-local URI.
+ if (WINDOWS &&
+ (hasWindowsDrive(path) ||
+ (scheme == null) ||
+ (scheme.isEmpty()) ||
+ (scheme.equals("file")))) {
path = StringUtils.replace(path, "\\", "/");
}
// trim trailing slash from non-root path (ignoring windows drive)
- int minLength = hasWindowsDrive(path, true) ? 4 : 1;
+ int minLength = hasWindowsDrive(path) ? 4 : 1;
if (path.length() > minLength && path.endsWith("/")) {
path = path.substring(0, path.length()-1);
}
@@ -174,17 +216,29 @@ public class Path implements Comparable
return path;
}
- private boolean hasWindowsDrive(String path, boolean slashed) {
- if (!WINDOWS) return false;
- int start = slashed ? 1 : 0;
- return
- path.length() >= start+2 &&
- (slashed ? path.charAt(0) == '/' : true) &&
- path.charAt(start+1) == ':' &&
- ((path.charAt(start) >= 'A' && path.charAt(start) <= 'Z') ||
- (path.charAt(start) >= 'a' && path.charAt(start) <= 'z'));
+ private static boolean hasWindowsDrive(String path) {
+ return (WINDOWS && hasDriveLetterSpecifier.matcher(path).find());
}
+ /**
+ * Determine whether a given path string represents an absolute path on
+ * Windows. e.g. "C:/a/b" is an absolute path. "C:a/b" is not.
+ *
+ * @param pathString Supplies the path string to evaluate.
+ * @param slashed true if the given path is prefixed with "/".
+ * @return true if the supplied path looks like an absolute path with a Windows
+ * drive-specifier.
+ */
+ public static boolean isWindowsAbsolutePath(final String pathString,
+ final boolean slashed) {
+ int start = (slashed ? 1 : 0);
+
+ return
+ hasWindowsDrive(pathString) &&
+ pathString.length() >= (start + 3) &&
+ ((pathString.charAt(start + 2) == SEPARATOR_CHAR) ||
+ (pathString.charAt(start + 2) == '\\'));
+ }
/** Convert this to a URI. */
public URI toUri() { return uri; }
@@ -207,7 +261,7 @@ public class Path implements Comparable
* True if the path component (i.e. directory) of this URI is absolute.
*/
public boolean isUriPathAbsolute() {
- int start = hasWindowsDrive(uri.getPath(), true) ? 3 : 0;
+ int start = hasWindowsDrive(uri.getPath()) ? 3 : 0;
return uri.getPath().startsWith(SEPARATOR, start);
}
@@ -241,7 +295,7 @@ public class Path implements Comparable
public Path getParent() {
String path = uri.getPath();
int lastSlash = path.lastIndexOf('/');
- int start = hasWindowsDrive(path, true) ? 3 : 0;
+ int start = hasWindowsDrive(path) ? 3 : 0;
if ((path.length() == start) || // empty path
(lastSlash == start && path.length() == start+1)) { // at root
return null;
@@ -250,7 +304,7 @@ public class Path implements Comparable
if (lastSlash==-1) {
parent = CUR_DIR;
} else {
- int end = hasWindowsDrive(path, true) ? 3 : 0;
+ int end = hasWindowsDrive(path) ? 3 : 0;
parent = path.substring(0, lastSlash==end?end+1:lastSlash);
}
return new Path(uri.getScheme(), uri.getAuthority(), parent);
@@ -277,7 +331,7 @@ public class Path implements Comparable
if (uri.getPath() != null) {
String path = uri.getPath();
if (path.indexOf('/')==0 &&
- hasWindowsDrive(path, true) && // has windows drive
+ hasWindowsDrive(path) && // has windows drive
uri.getScheme() == null && // but no scheme
uri.getAuthority() == null) // or authority
path = path.substring(1); // remove slash before drive
@@ -364,7 +418,7 @@ public class Path implements Comparable
URI newUri = null;
try {
newUri = new URI(scheme, authority ,
- normalizePath(pathUri.getPath()), null, fragment);
+ normalizePath(scheme, pathUri.getPath()), null, fragment);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java Wed Mar 6 19:15:18 2013
@@ -508,9 +508,10 @@ public class RawLocalFileSystem extends
return !super.getOwner().isEmpty();
}
- RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
+ RawLocalFileStatus(File f, long defaultBlockSize, FileSystem fs) {
super(f.length(), f.isDirectory(), 1, defaultBlockSize,
- f.lastModified(), fs.makeQualified(new Path(f.getPath())));
+ f.lastModified(), new Path(f.getPath()).makeQualified(fs.getUri(),
+ fs.getWorkingDirectory()));
}
@Override
@@ -541,9 +542,10 @@ public class RawLocalFileSystem extends
private void loadPermissionInfo() {
IOException e = null;
try {
- StringTokenizer t = new StringTokenizer(
- execCommand(new File(getPath().toUri()),
- Shell.getGET_PERMISSION_COMMAND()));
+ String output = FileUtil.execCommand(new File(getPath().toUri()),
+ Shell.getGetPermissionCommand());
+ StringTokenizer t =
+ new StringTokenizer(output, Shell.TOKEN_SEPARATOR_REGEX);
//expected format
//-rw------- 1 username groupname ...
String permission = t.nextToken();
@@ -552,7 +554,17 @@ public class RawLocalFileSystem extends
}
setPermission(FsPermission.valueOf(permission));
t.nextToken();
- setOwner(t.nextToken());
+
+ String owner = t.nextToken();
+ // If on windows domain, token format is DOMAIN\\user and we want to
+ // extract only the user name
+ if (Shell.WINDOWS) {
+ int i = owner.indexOf('\\');
+ if (i != -1)
+ owner = owner.substring(i + 1);
+ }
+ setOwner(owner);
+
setGroup(t.nextToken());
} catch (Shell.ExitCodeException ioe) {
if (ioe.getExitCode() != 1) {
@@ -588,17 +600,7 @@ public class RawLocalFileSystem extends
@Override
public void setOwner(Path p, String username, String groupname)
throws IOException {
- if (username == null && groupname == null) {
- throw new IOException("username == null && groupname == null");
- }
-
- if (username == null) {
- execCommand(pathToFile(p), Shell.SET_GROUP_COMMAND, groupname);
- } else {
- //OWNER[:[GROUP]]
- String s = username + (groupname == null? "": ":" + groupname);
- execCommand(pathToFile(p), Shell.SET_OWNER_COMMAND, s);
- }
+ FileUtil.setOwner(pathToFile(p), username, groupname);
}
/**
@@ -608,20 +610,12 @@ public class RawLocalFileSystem extends
public void setPermission(Path p, FsPermission permission)
throws IOException {
if (NativeIO.isAvailable()) {
- NativeIO.chmod(pathToFile(p).getCanonicalPath(),
+ NativeIO.POSIX.chmod(pathToFile(p).getCanonicalPath(),
permission.toShort());
} else {
- execCommand(pathToFile(p), Shell.SET_PERMISSION_COMMAND,
- String.format("%05o", permission.toShort()));
+ String perm = String.format("%04o", permission.toShort());
+ Shell.execCommand(Shell.getSetPermissionCommand(perm, false,
+ FileUtil.makeShellPath(pathToFile(p), true)));
}
}
-
- private static String execCommand(File f, String... cmd) throws IOException {
- String[] args = new String[cmd.length + 1];
- System.arraycopy(cmd, 0, args, 0, cmd.length);
- args[cmd.length] = FileUtil.makeShellPath(f, true);
- String output = Shell.execCommand(args);
- return output;
- }
-
-}
\ No newline at end of file
+}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicyDefault.java Wed Mar 6 19:15:18 2013
@@ -92,7 +92,7 @@ public class TrashPolicyDefault extends
}
private Path makeTrashRelativePath(Path basePath, Path rmFilePath) {
- return new Path(basePath + rmFilePath.toUri().getPath());
+ return Path.mergePaths(basePath, rmFilePath);
}
@Override
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/local/RawLocalFs.java Wed Mar 6 19:15:18 2013
@@ -89,11 +89,9 @@ public class RawLocalFs extends Delegate
}
// NB: Use createSymbolicLink in java.nio.file.Path once available
try {
- Shell.execCommand(Shell.LINK_COMMAND, "-s",
- new URI(target.toString()).getPath(),
- new URI(link.toString()).getPath());
- } catch (URISyntaxException x) {
- throw new IOException("Invalid symlink path: "+x.getMessage());
+ Shell.execCommand(Shell.getSymlinkCommand(
+ getPathWithoutSchemeAndAuthority(target),
+ getPathWithoutSchemeAndAuthority(link)));
} catch (IOException x) {
throw new IOException("Unable to create symlink: "+x.getMessage());
}
@@ -176,4 +174,13 @@ public class RawLocalFs extends Delegate
*/
throw new AssertionError();
}
+
+ private static String getPathWithoutSchemeAndAuthority(Path path) {
+ // This code depends on Path.toString() to remove the leading slash before
+ // the drive specification on Windows.
+ Path newPath = path.isUriPathAbsolute() ?
+ new Path(null, null, path.toUri().getPath()) :
+ path;
+ return newPath.toString();
+ }
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java Wed Mar 6 19:15:18 2013
@@ -21,6 +21,8 @@ package org.apache.hadoop.fs.shell;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.util.LinkedList;
import org.apache.hadoop.fs.FSDataOutputStream;
@@ -72,8 +74,12 @@ abstract class CommandWithDestination ex
*/
protected void getLocalDestination(LinkedList<String> args)
throws IOException {
- String pathString = (args.size() < 2) ? Path.CUR_DIR : args.removeLast();
- dst = new PathData(new File(pathString), getConf());
+ try {
+ String pathString = (args.size() < 2) ? Path.CUR_DIR : args.removeLast();
+ dst = new PathData(new URI(pathString), getConf());
+ } catch (URISyntaxException e) {
+ throw new IOException("unexpected URISyntaxException", e);
+ }
}
/**
@@ -295,4 +301,4 @@ abstract class CommandWithDestination ex
processDeleteOnExit();
}
}
-}
\ No newline at end of file
+}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CopyCommands.java Wed Mar 6 19:15:18 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.fs.shell;
import java.io.File;
import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
import java.util.LinkedList;
import java.util.List;
@@ -60,16 +62,20 @@ class CopyCommands {
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
- CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "nl");
- cf.parse(args);
+ try {
+ CommandFormat cf = new CommandFormat(2, Integer.MAX_VALUE, "nl");
+ cf.parse(args);
- delimiter = cf.getOpt("nl") ? "\n" : null;
+ delimiter = cf.getOpt("nl") ? "\n" : null;
- dst = new PathData(new File(args.removeLast()), getConf());
- if (dst.exists && dst.stat.isDirectory()) {
- throw new PathIsDirectoryException(dst.toString());
+ dst = new PathData(new URI(args.removeLast()), getConf());
+ if (dst.exists && dst.stat.isDirectory()) {
+ throw new PathIsDirectoryException(dst.toString());
+ }
+ srcs = new LinkedList<PathData>();
+ } catch (URISyntaxException e) {
+ throw new IOException("unexpected URISyntaxException", e);
}
- srcs = new LinkedList<PathData>();
}
@Override
@@ -188,9 +194,13 @@ class CopyCommands {
// commands operating on local paths have no need for glob expansion
@Override
protected List<PathData> expandArgument(String arg) throws IOException {
- List<PathData> items = new LinkedList<PathData>();
- items.add(new PathData(new File(arg), getConf()));
- return items;
+ try {
+ List<PathData> items = new LinkedList<PathData>();
+ items.add(new PathData(new URI(arg), getConf()));
+ return items;
+ } catch (URISyntaxException e) {
+ throw new IOException("unexpected URISyntaxException", e);
+ }
}
@Override
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java Wed Mar 6 19:15:18 2013
@@ -24,6 +24,7 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
+import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -39,6 +40,9 @@ import org.apache.hadoop.fs.PathNotFound
/**
* Encapsulates a Path (path), its FileStatus (stat), and its FileSystem (fs).
+ * PathData ensures that the returned path string will be the same as the
+ * one passed in during initialization (unlike Path objects which can
+ * modify the path string).
* The stat field will be null if the path does not exist.
*/
@InterfaceAudience.Private
@@ -51,6 +55,20 @@ public class PathData implements Compara
public FileStatus stat;
public boolean exists;
+ /* True if the URI scheme was not present in the pathString but inferred.
+ */
+ private boolean inferredSchemeFromPath = false;
+
+ /**
+ * Pre-compiled regular expressions to detect path formats.
+ */
+ private static final Pattern potentialUri =
+ Pattern.compile("^[a-zA-Z][a-zA-Z0-9+-.]+:");
+ private static final Pattern windowsNonUriAbsolutePath1 =
+ Pattern.compile("^/?[a-zA-Z]:\\\\");
+ private static final Pattern windowsNonUriAbsolutePath2 =
+ Pattern.compile("^/?[a-zA-Z]:/");
+
/**
* Creates an object to wrap the given parameters as fields. The string
* used to create the path will be recorded since the Path object does not
@@ -67,12 +85,12 @@ public class PathData implements Compara
* Creates an object to wrap the given parameters as fields. The string
* used to create the path will be recorded since the Path object does not
* return exactly the same string used to initialize it
- * @param localPath a local File
+ * @param localPath a local URI
* @param conf the configuration file
* @throws IOException if anything goes wrong...
*/
- public PathData(File localPath, Configuration conf) throws IOException {
- this(FileSystem.getLocal(conf), localPath.toString());
+ public PathData(URI localPath, Configuration conf) throws IOException {
+ this(FileSystem.getLocal(conf), localPath.getPath());
}
/**
@@ -87,6 +105,39 @@ public class PathData implements Compara
}
/**
+ * Validates the given Windows path.
+ * Throws IOException on failure.
+   * @param pathString a String of the path supplied by the user.
+ */
+ private void ValidateWindowsPath(String pathString)
+ throws IOException
+ {
+ if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
+ // Forward slashes disallowed in a backslash-separated path.
+ if (pathString.indexOf('/') != -1) {
+ throw new IOException("Invalid path string " + pathString);
+ }
+
+ inferredSchemeFromPath = true;
+ return;
+ }
+
+ // Is it a forward slash-separated absolute path?
+ if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
+ inferredSchemeFromPath = true;
+ return;
+ }
+
+ // Does it look like a URI? If so then just leave it alone.
+ if (potentialUri.matcher(pathString).find()) {
+ return;
+ }
+
+ // Looks like a relative path on Windows.
+ return;
+ }
+
+ /**
* Creates an object to wrap the given parameters as fields. The string
* used to create the path will be recorded since the Path object does not
* return exactly the same string used to initialize it.
@@ -100,6 +151,10 @@ public class PathData implements Compara
this.uri = stringToUri(pathString);
this.path = fs.makeQualified(new Path(uri));
setStat(stat);
+
+ if (Path.WINDOWS) {
+ ValidateWindowsPath(pathString);
+ }
}
// need a static method for the ctor above
@@ -236,7 +291,7 @@ public class PathData implements Compara
* Given a child of this directory, use the directory's path and the child's
* basename to construct the string to the child. This preserves relative
* paths since Path will fully qualify.
- * @param child a path contained within this directory
+ * @param childPath a path contained within this directory
* @return String of the path relative to this directory
*/
private String getStringForChildPath(Path childPath) {
@@ -386,7 +441,14 @@ public class PathData implements Compara
// No interpretation of symbols. Just decode % escaped chars.
String decodedRemainder = uri.getSchemeSpecificPart();
- if (scheme == null) {
+ // Drop the scheme if it was inferred to ensure fidelity between
+ // the input and output path strings.
+ if ((scheme == null) || (inferredSchemeFromPath)) {
+ if (Path.isWindowsAbsolutePath(decodedRemainder, true)) {
+ // Strip the leading '/' added in stringToUri so users see a valid
+ // Windows path.
+ decodedRemainder = decodedRemainder.substring(1);
+ }
return decodedRemainder;
} else {
StringBuilder buffer = new StringBuilder();
@@ -409,13 +471,56 @@ public class PathData implements Compara
return ((LocalFileSystem)fs).pathToFile(path);
}
+ /** Normalize the given Windows path string. This does the following:
+ * 1. Adds "file:" scheme for absolute paths.
+ * 2. Ensures the scheme-specific part starts with '/' per RFC2396.
+ * 3. Replaces backslash path separators with forward slashes.
+ * @param pathString Path string supplied by the user.
+ * @return normalized absolute path string. Returns the input string
+ * if it is not a Windows absolute path.
+ */
+ private static String normalizeWindowsPath(String pathString)
+ throws IOException
+ {
+ if (!Path.WINDOWS) {
+ return pathString;
+ }
+
+ boolean slashed =
+ ((pathString.length() >= 1) && (pathString.charAt(0) == '/'));
+
+ // Is it a backslash-separated absolute path?
+ if (windowsNonUriAbsolutePath1.matcher(pathString).find()) {
+ // Forward slashes disallowed in a backslash-separated path.
+ if (pathString.indexOf('/') != -1) {
+ throw new IOException("Invalid path string " + pathString);
+ }
+
+ pathString = pathString.replace('\\', '/');
+ return "file:" + (slashed ? "" : "/") + pathString;
+ }
+
+ // Is it a forward slash-separated absolute path?
+ if (windowsNonUriAbsolutePath2.matcher(pathString).find()) {
+ return "file:" + (slashed ? "" : "/") + pathString;
+ }
+
+ // Is it a backslash-separated relative file path (no scheme and
+ // no drive-letter specifier)?
+ if ((pathString.indexOf(':') == -1) && (pathString.indexOf('\\') != -1)) {
+ pathString = pathString.replace('\\', '/');
+ }
+
+ return pathString;
+ }
+
/** Construct a URI from a String with unescaped special characters
- * that have non-standard sematics. e.g. /, ?, #. A custom parsing
- * is needed to prevent misbihaviors.
+ * that have non-standard semantics. e.g. /, ?, #. A custom parsing
+ * is needed to prevent misbehavior.
* @param pathString The input path in string form
* @return URI
*/
- private static URI stringToUri(String pathString) {
+ private static URI stringToUri(String pathString) throws IOException {
// We can't use 'new URI(String)' directly. Since it doesn't do quoting
// internally, the internal parser may fail or break the string at wrong
// places. Use of multi-argument ctors will quote those chars for us,
@@ -424,9 +529,10 @@ public class PathData implements Compara
// parse uri components
String scheme = null;
String authority = null;
-
int start = 0;
+ pathString = normalizeWindowsPath(pathString);
+
// parse uri scheme, if any
int colon = pathString.indexOf(':');
int slash = pathString.indexOf('/');
@@ -445,8 +551,7 @@ public class PathData implements Compara
authority = pathString.substring(start, authEnd);
start = authEnd;
}
-
- // uri path is the rest of the string. ? or # are not interpreated,
+ // uri path is the rest of the string. ? or # are not interpreted,
// but any occurrence of them will be quoted by the URI ctor.
String path = pathString.substring(start, pathString.length());
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java Wed Mar 6 19:15:18 2013
@@ -61,6 +61,7 @@ import org.apache.hadoop.security.authen
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.ssl.SSLFactory;
import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.Shell;
import org.mortbay.io.Buffer;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Handler;
@@ -305,6 +306,13 @@ public class HttpServer implements Filte
ret.setAcceptQueueSize(128);
ret.setResolveNames(false);
ret.setUseDirectBuffers(false);
+ if(Shell.WINDOWS) {
+ // result of setting the SO_REUSEADDR flag is different on Windows
+ // http://msdn.microsoft.com/en-us/library/ms740621(v=vs.85).aspx
+ // without this 2 NN's can start on the same machine and listen on
+ // the same port with indeterminate routing of incoming requests to them
+ ret.setReuseAddress(false);
+ }
ret.setHeaderBufferSize(1024*64);
return ret;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java Wed Mar 6 19:15:18 2013
@@ -203,8 +203,8 @@ public class ReadaheadPool {
// It's also possible that we'll end up requesting readahead on some
// other FD, which may be wasted work, but won't cause a problem.
try {
- NativeIO.posixFadviseIfPossible(fd, off, len,
- NativeIO.POSIX_FADV_WILLNEED);
+ NativeIO.POSIX.posixFadviseIfPossible(fd, off, len,
+ NativeIO.POSIX.POSIX_FADV_WILLNEED);
} catch (IOException ioe) {
if (canceled) {
// no big deal - the reader canceled the request and closed
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java Wed Mar 6 19:15:18 2013
@@ -22,6 +22,7 @@ import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -30,7 +31,7 @@ import org.apache.hadoop.fs.permission.F
import org.apache.hadoop.io.nativeio.Errno;
import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.io.nativeio.NativeIOException;
-import org.apache.hadoop.io.nativeio.NativeIO.Stat;
+import org.apache.hadoop.io.nativeio.NativeIO.POSIX.Stat;
import org.apache.hadoop.security.UserGroupInformation;
/**
@@ -120,7 +121,7 @@ public class SecureIOUtils {
FileInputStream fis = new FileInputStream(f);
boolean success = false;
try {
- Stat stat = NativeIO.getFstat(fis.getFD());
+ Stat stat = NativeIO.POSIX.getFstat(fis.getFD());
checkStat(f, stat.getOwner(), stat.getGroup(), expectedOwner,
expectedGroup);
success = true;
@@ -166,35 +167,30 @@ public class SecureIOUtils {
if (skipSecurity) {
return insecureCreateForWrite(f, permissions);
} else {
- // Use the native wrapper around open(2)
- try {
- FileDescriptor fd = NativeIO.open(f.getAbsolutePath(),
- NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL,
- permissions);
- return new FileOutputStream(fd);
- } catch (NativeIOException nioe) {
- if (nioe.getErrno() == Errno.EEXIST) {
- throw new AlreadyExistsException(nioe);
- }
- throw nioe;
- }
+ return NativeIO.getCreateForWriteFileOutputStream(f, permissions);
}
}
private static void checkStat(File f, String owner, String group,
String expectedOwner,
String expectedGroup) throws IOException {
+ boolean success = true;
if (expectedOwner != null &&
!expectedOwner.equals(owner)) {
- throw new IOException(
- "Owner '" + owner + "' for path " + f + " did not match " +
- "expected owner '" + expectedOwner + "'");
+ if (Path.WINDOWS) {
+ UserGroupInformation ugi =
+ UserGroupInformation.createRemoteUser(expectedOwner);
+ final String adminsGroupString = "Administrators";
+ success = owner.equals(adminsGroupString)
+ && Arrays.asList(ugi.getGroupNames()).contains(adminsGroupString);
+ } else {
+ success = false;
+ }
}
- if (expectedGroup != null &&
- !expectedGroup.equals(group)) {
+ if (!success) {
throw new IOException(
- "Group '" + group + "' for path " + f + " did not match " +
- "expected group '" + expectedGroup + "'");
+ "Owner '" + owner + "' for path " + f + " did not match " +
+ "expected owner '" + expectedOwner + "'");
}
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java Wed Mar 6 19:15:18 2013
@@ -19,7 +19,10 @@ package org.apache.hadoop.io.nativeio;
import java.io.File;
import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.RandomAccessFile;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -27,10 +30,13 @@ import org.apache.hadoop.classification.
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.io.SecureIOUtils.AlreadyExistsException;
import org.apache.hadoop.util.NativeCodeLoader;
+import org.apache.hadoop.util.Shell;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+
/**
* JNI wrappers for various native IO-related calls not available in Java.
* These functions should generally be used alongside a fallback to another
@@ -39,81 +45,341 @@ import org.apache.commons.logging.LogFac
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class NativeIO {
- // Flags for open() call from bits/fcntl.h
- public static final int O_RDONLY = 00;
- public static final int O_WRONLY = 01;
- public static final int O_RDWR = 02;
- public static final int O_CREAT = 0100;
- public static final int O_EXCL = 0200;
- public static final int O_NOCTTY = 0400;
- public static final int O_TRUNC = 01000;
- public static final int O_APPEND = 02000;
- public static final int O_NONBLOCK = 04000;
- public static final int O_SYNC = 010000;
- public static final int O_ASYNC = 020000;
- public static final int O_FSYNC = O_SYNC;
- public static final int O_NDELAY = O_NONBLOCK;
-
- // Flags for posix_fadvise() from bits/fcntl.h
- /* No further special treatment. */
- public static final int POSIX_FADV_NORMAL = 0;
- /* Expect random page references. */
- public static final int POSIX_FADV_RANDOM = 1;
- /* Expect sequential page references. */
- public static final int POSIX_FADV_SEQUENTIAL = 2;
- /* Will need these pages. */
- public static final int POSIX_FADV_WILLNEED = 3;
- /* Don't need these pages. */
- public static final int POSIX_FADV_DONTNEED = 4;
- /* Data will be accessed once. */
- public static final int POSIX_FADV_NOREUSE = 5;
-
-
- /* Wait upon writeout of all pages
- in the range before performing the
- write. */
- public static final int SYNC_FILE_RANGE_WAIT_BEFORE = 1;
- /* Initiate writeout of all those
- dirty pages in the range which are
- not presently under writeback. */
- public static final int SYNC_FILE_RANGE_WRITE = 2;
-
- /* Wait upon writeout of all pages in
- the range after performing the
- write. */
- public static final int SYNC_FILE_RANGE_WAIT_AFTER = 4;
+ public static class POSIX {
+ // Flags for open() call from bits/fcntl.h
+ public static final int O_RDONLY = 00;
+ public static final int O_WRONLY = 01;
+ public static final int O_RDWR = 02;
+ public static final int O_CREAT = 0100;
+ public static final int O_EXCL = 0200;
+ public static final int O_NOCTTY = 0400;
+ public static final int O_TRUNC = 01000;
+ public static final int O_APPEND = 02000;
+ public static final int O_NONBLOCK = 04000;
+ public static final int O_SYNC = 010000;
+ public static final int O_ASYNC = 020000;
+ public static final int O_FSYNC = O_SYNC;
+ public static final int O_NDELAY = O_NONBLOCK;
+
+ // Flags for posix_fadvise() from bits/fcntl.h
+ /* No further special treatment. */
+ public static final int POSIX_FADV_NORMAL = 0;
+ /* Expect random page references. */
+ public static final int POSIX_FADV_RANDOM = 1;
+ /* Expect sequential page references. */
+ public static final int POSIX_FADV_SEQUENTIAL = 2;
+ /* Will need these pages. */
+ public static final int POSIX_FADV_WILLNEED = 3;
+ /* Don't need these pages. */
+ public static final int POSIX_FADV_DONTNEED = 4;
+ /* Data will be accessed once. */
+ public static final int POSIX_FADV_NOREUSE = 5;
+
+
+ /* Wait upon writeout of all pages
+ in the range before performing the
+ write. */
+ public static final int SYNC_FILE_RANGE_WAIT_BEFORE = 1;
+ /* Initiate writeout of all those
+ dirty pages in the range which are
+ not presently under writeback. */
+ public static final int SYNC_FILE_RANGE_WRITE = 2;
+
+ /* Wait upon writeout of all pages in
+ the range after performing the
+ write. */
+ public static final int SYNC_FILE_RANGE_WAIT_AFTER = 4;
+
+ private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+ private static boolean nativeLoaded = false;
+ private static boolean fadvisePossible = true;
+ private static boolean syncFileRangePossible = true;
+
+ static final String WORKAROUND_NON_THREADSAFE_CALLS_KEY =
+ "hadoop.workaround.non.threadsafe.getpwuid";
+ static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
+
+ private static long cacheTimeout = -1;
+
+ static {
+ if (NativeCodeLoader.isNativeCodeLoaded()) {
+ try {
+ Configuration conf = new Configuration();
+ workaroundNonThreadSafePasswdCalls = conf.getBoolean(
+ WORKAROUND_NON_THREADSAFE_CALLS_KEY,
+ WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT);
+
+ initNative();
+ nativeLoaded = true;
+
+ cacheTimeout = conf.getLong(
+ CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
+ CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
+ 1000;
+ LOG.debug("Initialized cache for IDs to User/Group mapping with a" +
+ " cache timeout of " + cacheTimeout/1000 + " seconds.");
+
+ } catch (Throwable t) {
+ // This can happen if the user has an older version of libhadoop.so
+ // installed - in this case we can continue without native IO
+ // after warning
+ LOG.error("Unable to initialize NativeIO libraries", t);
+ }
+ }
+ }
- private static final Log LOG = LogFactory.getLog(NativeIO.class);
+ /**
+ * Return true if the JNI-based native IO extensions are available.
+ */
+ public static boolean isAvailable() {
+ return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
+ }
+
+ /** Wrapper around open(2) */
+ public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
+ /** Wrapper around fstat(2) */
+ private static native Stat fstat(FileDescriptor fd) throws IOException;
+
+ /** Native chmod implementation. On UNIX, it is a wrapper around chmod(2) */
+ private static native void chmodImpl(String path, int mode) throws IOException;
+
+ public static void chmod(String path, int mode) throws IOException {
+ if (!Shell.WINDOWS) {
+ chmodImpl(path, mode);
+ } else {
+ try {
+ chmodImpl(path, mode);
+ } catch (NativeIOException nioe) {
+ if (nioe.getErrorCode() == 3) {
+ throw new NativeIOException("No such file or directory",
+ Errno.ENOENT);
+ } else {
+ LOG.warn(String.format("NativeIO.chmod error (%d): %s",
+ nioe.getErrorCode(), nioe.getMessage()));
+ throw new NativeIOException("Unknown error", Errno.UNKNOWN);
+ }
+ }
+ }
+ }
+
+ /** Wrapper around posix_fadvise(2) */
+ static native void posix_fadvise(
+ FileDescriptor fd, long offset, long len, int flags) throws NativeIOException;
+
+ /** Wrapper around sync_file_range(2) */
+ static native void sync_file_range(
+ FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
+
+ /**
+ * Call posix_fadvise on the given file descriptor. See the manpage
+ * for this syscall for more information. On systems where this
+ * call is not available, does nothing.
+ *
+ * @throws NativeIOException if there is an error with the syscall
+ */
+ public static void posixFadviseIfPossible(
+ FileDescriptor fd, long offset, long len, int flags)
+ throws NativeIOException {
+ if (nativeLoaded && fadvisePossible) {
+ try {
+ posix_fadvise(fd, offset, len, flags);
+ } catch (UnsupportedOperationException uoe) {
+ fadvisePossible = false;
+ } catch (UnsatisfiedLinkError ule) {
+ fadvisePossible = false;
+ }
+ }
+ }
+
+ /**
+ * Call sync_file_range on the given file descriptor. See the manpage
+ * for this syscall for more information. On systems where this
+ * call is not available, does nothing.
+ *
+ * @throws NativeIOException if there is an error with the syscall
+ */
+ public static void syncFileRangeIfPossible(
+ FileDescriptor fd, long offset, long nbytes, int flags)
+ throws NativeIOException {
+ if (nativeLoaded && syncFileRangePossible) {
+ try {
+ sync_file_range(fd, offset, nbytes, flags);
+ } catch (UnsupportedOperationException uoe) {
+ syncFileRangePossible = false;
+ } catch (UnsatisfiedLinkError ule) {
+ syncFileRangePossible = false;
+ }
+ }
+ }
+
+ /** Linux only methods used for getOwner() implementation */
+ private static native long getUIDforFDOwnerforOwner(FileDescriptor fd) throws IOException;
+ private static native String getUserName(long uid) throws IOException;
+
+ /**
+ * Result type of the fstat call
+ */
+ public static class Stat {
+ private int ownerId, groupId;
+ private String owner, group;
+ private int mode;
+
+ // Mode constants
+ public static final int S_IFMT = 0170000; /* type of file */
+ public static final int S_IFIFO = 0010000; /* named pipe (fifo) */
+ public static final int S_IFCHR = 0020000; /* character special */
+ public static final int S_IFDIR = 0040000; /* directory */
+ public static final int S_IFBLK = 0060000; /* block special */
+ public static final int S_IFREG = 0100000; /* regular */
+ public static final int S_IFLNK = 0120000; /* symbolic link */
+ public static final int S_IFSOCK = 0140000; /* socket */
+ public static final int S_IFWHT = 0160000; /* whiteout */
+ public static final int S_ISUID = 0004000; /* set user id on execution */
+ public static final int S_ISGID = 0002000; /* set group id on execution */
+ public static final int S_ISVTX = 0001000; /* save swapped text even after use */
+ public static final int S_IRUSR = 0000400; /* read permission, owner */
+ public static final int S_IWUSR = 0000200; /* write permission, owner */
+ public static final int S_IXUSR = 0000100; /* execute/search permission, owner */
+
+ Stat(int ownerId, int groupId, int mode) {
+ this.ownerId = ownerId;
+ this.groupId = groupId;
+ this.mode = mode;
+ }
+
+ @Override
+ public String toString() {
+ return "Stat(owner='" + owner + "', group='" + group + "'" +
+ ", mode=" + mode + ")";
+ }
+
+ public String getOwner() {
+ return owner;
+ }
+ public String getGroup() {
+ return group;
+ }
+ public int getMode() {
+ return mode;
+ }
+ }
+
+ /**
+ * Returns the file stat for a file descriptor.
+ *
+ * @param fd file descriptor.
+ * @return the file descriptor file stat.
+ * @throws IOException thrown if there was an IO error while obtaining the file stat.
+ */
+ public static Stat getFstat(FileDescriptor fd) throws IOException {
+ Stat stat = fstat(fd);
+ stat.owner = getName(IdCache.USER, stat.ownerId);
+ stat.group = getName(IdCache.GROUP, stat.groupId);
+ return stat;
+ }
+
+ private static String getName(IdCache domain, int id) throws IOException {
+ Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
+ ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
+ String name;
+ CachedName cachedName = idNameCache.get(id);
+ long now = System.currentTimeMillis();
+ if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
+ name = cachedName.name;
+ } else {
+ name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
+ if (LOG.isDebugEnabled()) {
+ String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
+ LOG.debug("Got " + type + " " + name + " for ID " + id +
+ " from the native implementation");
+ }
+ cachedName = new CachedName(name, now);
+ idNameCache.put(id, cachedName);
+ }
+ return name;
+ }
+
+ static native String getUserName(int uid) throws IOException;
+ static native String getGroupName(int uid) throws IOException;
+
+ private static class CachedName {
+ final long timestamp;
+ final String name;
+
+ public CachedName(String name, long timestamp) {
+ this.name = name;
+ this.timestamp = timestamp;
+ }
+ }
+
+ private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
+ new ConcurrentHashMap<Integer, CachedName>();
+
+ private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
+ new ConcurrentHashMap<Integer, CachedName>();
+
+ private enum IdCache { USER, GROUP }
+ }
- private static boolean nativeLoaded = false;
private static boolean workaroundNonThreadSafePasswdCalls = false;
- private static boolean fadvisePossible = true;
- private static boolean syncFileRangePossible = true;
- static final String WORKAROUND_NON_THREADSAFE_CALLS_KEY =
- "hadoop.workaround.non.threadsafe.getpwuid";
- static final boolean WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT = false;
- private static long cacheTimeout = -1;
+ public static class Windows {
+ // Flags for CreateFile() call on Windows
+ public static final long GENERIC_READ = 0x80000000L;
+ public static final long GENERIC_WRITE = 0x40000000L;
+
+ public static final long FILE_SHARE_READ = 0x00000001L;
+ public static final long FILE_SHARE_WRITE = 0x00000002L;
+ public static final long FILE_SHARE_DELETE = 0x00000004L;
+
+ public static final long CREATE_NEW = 1;
+ public static final long CREATE_ALWAYS = 2;
+ public static final long OPEN_EXISTING = 3;
+ public static final long OPEN_ALWAYS = 4;
+ public static final long TRUNCATE_EXISTING = 5;
+
+ public static final long FILE_BEGIN = 0;
+ public static final long FILE_CURRENT = 1;
+ public static final long FILE_END = 2;
+
+ /** Wrapper around CreateFile() on Windows */
+ public static native FileDescriptor createFile(String path,
+ long desiredAccess, long shareMode, long creationDisposition)
+ throws IOException;
+
+ /** Wrapper around SetFilePointer() on Windows */
+ public static native long setFilePointer(FileDescriptor fd,
+ long distanceToMove, long moveMethod) throws IOException;
+
+ /** Windows only methods used for getOwner() implementation */
+ private static native String getOwner(FileDescriptor fd) throws IOException;
+
+ static {
+ if (NativeCodeLoader.isNativeCodeLoaded()) {
+ try {
+ initNative();
+ nativeLoaded = true;
+ } catch (Throwable t) {
+ // This can happen if the user has an older version of libhadoop.so
+ // installed - in this case we can continue without native IO
+ // after warning
+ LOG.error("Unable to initialize NativeIO libraries", t);
+ }
+ }
+ }
+ }
+
+ private static final Log LOG = LogFactory.getLog(NativeIO.class);
+
+ private static boolean nativeLoaded = false;
static {
if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
- Configuration conf = new Configuration();
- workaroundNonThreadSafePasswdCalls = conf.getBoolean(
- WORKAROUND_NON_THREADSAFE_CALLS_KEY,
- WORKAROUND_NON_THREADSAFE_CALLS_DEFAULT);
-
initNative();
nativeLoaded = true;
-
- cacheTimeout = conf.getLong(
- CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_KEY,
- CommonConfigurationKeys.HADOOP_SECURITY_UID_NAME_CACHE_TIMEOUT_DEFAULT) *
- 1000;
- LOG.debug("Initialized cache for IDs to User/Group mapping with a" +
- " cache timeout of " + cacheTimeout/1000 + " seconds.");
-
} catch (Throwable t) {
// This can happen if the user has an older version of libhadoop.so
// installed - in this case we can continue without native IO
@@ -130,169 +396,161 @@ public class NativeIO {
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
}
- /** Wrapper around open(2) */
- public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
- /** Wrapper around fstat(2) */
- private static native Stat fstat(FileDescriptor fd) throws IOException;
- /** Wrapper around chmod(2) */
- public static native void chmod(String path, int mode) throws IOException;
-
- /** Wrapper around posix_fadvise(2) */
- static native void posix_fadvise(
- FileDescriptor fd, long offset, long len, int flags) throws NativeIOException;
-
- /** Wrapper around sync_file_range(2) */
- static native void sync_file_range(
- FileDescriptor fd, long offset, long nbytes, int flags) throws NativeIOException;
-
/** Initialize the JNI method ID and class ID cache */
private static native void initNative();
- /**
- * Call posix_fadvise on the given file descriptor. See the manpage
- * for this syscall for more information. On systems where this
- * call is not available, does nothing.
- *
- * @throws NativeIOException if there is an error with the syscall
- */
- public static void posixFadviseIfPossible(
- FileDescriptor fd, long offset, long len, int flags)
- throws NativeIOException {
- if (nativeLoaded && fadvisePossible) {
- try {
- posix_fadvise(fd, offset, len, flags);
- } catch (UnsupportedOperationException uoe) {
- fadvisePossible = false;
- } catch (UnsatisfiedLinkError ule) {
- fadvisePossible = false;
- }
+ private static class CachedUid {
+ final long timestamp;
+ final String username;
+ public CachedUid(String username, long timestamp) {
+ this.timestamp = timestamp;
+ this.username = username;
}
}
-
- /**
- * Call sync_file_range on the given file descriptor. See the manpage
- * for this syscall for more information. On systems where this
- * call is not available, does nothing.
- *
- * @throws NativeIOException if there is an error with the syscall
- */
- public static void syncFileRangeIfPossible(
- FileDescriptor fd, long offset, long nbytes, int flags)
- throws NativeIOException {
- if (nativeLoaded && syncFileRangePossible) {
- try {
- sync_file_range(fd, offset, nbytes, flags);
- } catch (UnsupportedOperationException uoe) {
- syncFileRangePossible = false;
- } catch (UnsatisfiedLinkError ule) {
- syncFileRangePossible = false;
+ private static final Map<Long, CachedUid> uidCache =
+ new ConcurrentHashMap<Long, CachedUid>();
+ private static long cacheTimeout;
+ private static boolean initialized = false;
+
+ public static String getOwner(FileDescriptor fd) throws IOException {
+ ensureInitialized();
+ if (Shell.WINDOWS) {
+ String owner = Windows.getOwner(fd);
+ int i = owner.indexOf('\\');
+ if (i != -1)
+ owner = owner.substring(i + 1);
+ return owner;
+ } else {
+ long uid = POSIX.getUIDforFDOwnerforOwner(fd);
+ CachedUid cUid = uidCache.get(uid);
+ long now = System.currentTimeMillis();
+ if (cUid != null && (cUid.timestamp + cacheTimeout) > now) {
+ return cUid.username;
}
+ String user = POSIX.getUserName(uid);
+ LOG.info("Got UserName " + user + " for UID " + uid
+ + " from the native implementation");
+ cUid = new CachedUid(user, now);
+ uidCache.put(uid, cUid);
+ return user;
}
}
/**
- * Result type of the fstat call
+ * Create a FileInputStream that shares delete permission on the
+ * file opened, i.e. other processes can delete the file the
+ * FileInputStream is reading. Only the Windows implementation uses
+ * the native interface.
*/
- public static class Stat {
- private int ownerId, groupId;
- private String owner, group;
- private int mode;
-
- // Mode constants
- public static final int S_IFMT = 0170000; /* type of file */
- public static final int S_IFIFO = 0010000; /* named pipe (fifo) */
- public static final int S_IFCHR = 0020000; /* character special */
- public static final int S_IFDIR = 0040000; /* directory */
- public static final int S_IFBLK = 0060000; /* block special */
- public static final int S_IFREG = 0100000; /* regular */
- public static final int S_IFLNK = 0120000; /* symbolic link */
- public static final int S_IFSOCK = 0140000; /* socket */
- public static final int S_IFWHT = 0160000; /* whiteout */
- public static final int S_ISUID = 0004000; /* set user id on execution */
- public static final int S_ISGID = 0002000; /* set group id on execution */
- public static final int S_ISVTX = 0001000; /* save swapped text even after use */
- public static final int S_IRUSR = 0000400; /* read permission, owner */
- public static final int S_IWUSR = 0000200; /* write permission, owner */
- public static final int S_IXUSR = 0000100; /* execute/search permission, owner */
-
- Stat(int ownerId, int groupId, int mode) {
- this.ownerId = ownerId;
- this.groupId = groupId;
- this.mode = mode;
- }
-
- @Override
- public String toString() {
- return "Stat(owner='" + owner + "', group='" + group + "'" +
- ", mode=" + mode + ")";
- }
-
- public String getOwner() {
- return owner;
- }
- public String getGroup() {
- return group;
- }
- public int getMode() {
- return mode;
+ public static FileInputStream getShareDeleteFileInputStream(File f)
+ throws IOException {
+ if (!Shell.WINDOWS) {
+ // On Linux the default FileInputStream shares delete permission
+ // on the file opened.
+ //
+ return new FileInputStream(f);
+ } else {
+ // Use Windows native interface to create a FileInputStream that
+ // shares delete permission on the file opened.
+ //
+ FileDescriptor fd = Windows.createFile(
+ f.getAbsolutePath(),
+ Windows.GENERIC_READ,
+ Windows.FILE_SHARE_READ |
+ Windows.FILE_SHARE_WRITE |
+ Windows.FILE_SHARE_DELETE,
+ Windows.OPEN_EXISTING);
+ return new FileInputStream(fd);
}
}
- static native String getUserName(int uid) throws IOException;
-
- static native String getGroupName(int uid) throws IOException;
-
- private static class CachedName {
- final long timestamp;
- final String name;
-
- public CachedName(String name, long timestamp) {
- this.name = name;
- this.timestamp = timestamp;
+ /**
+ * Create a FileInputStream that shares delete permission on the
+ * file opened at a given offset, i.e. other processes can delete
+ * the file the FileInputStream is reading. Only the Windows implementation
+ * uses the native interface.
+ */
+ public static FileInputStream getShareDeleteFileInputStream(File f, long seekOffset)
+ throws IOException {
+ if (!Shell.WINDOWS) {
+ RandomAccessFile rf = new RandomAccessFile(f, "r");
+ if (seekOffset > 0) {
+ rf.seek(seekOffset);
+ }
+ return new FileInputStream(rf.getFD());
+ } else {
+ // Use Windows native interface to create a FileInputStream that
+ // shares delete permission on the file opened, and set it to the
+ // given offset.
+ //
+ FileDescriptor fd = NativeIO.Windows.createFile(
+ f.getAbsolutePath(),
+ NativeIO.Windows.GENERIC_READ,
+ NativeIO.Windows.FILE_SHARE_READ |
+ NativeIO.Windows.FILE_SHARE_WRITE |
+ NativeIO.Windows.FILE_SHARE_DELETE,
+ NativeIO.Windows.OPEN_EXISTING);
+ if (seekOffset > 0)
+ NativeIO.Windows.setFilePointer(fd, seekOffset, NativeIO.Windows.FILE_BEGIN);
+ return new FileInputStream(fd);
}
}
- private static final Map<Integer, CachedName> USER_ID_NAME_CACHE =
- new ConcurrentHashMap<Integer, CachedName>();
-
- private static final Map<Integer, CachedName> GROUP_ID_NAME_CACHE =
- new ConcurrentHashMap<Integer, CachedName>();
-
- private enum IdCache { USER, GROUP }
-
- private static String getName(IdCache domain, int id) throws IOException {
- Map<Integer, CachedName> idNameCache = (domain == IdCache.USER)
- ? USER_ID_NAME_CACHE : GROUP_ID_NAME_CACHE;
- String name;
- CachedName cachedName = idNameCache.get(id);
- long now = System.currentTimeMillis();
- if (cachedName != null && (cachedName.timestamp + cacheTimeout) > now) {
- name = cachedName.name;
+ /**
+ * Create the specified File for write access, ensuring that it does not exist.
+ * @param f the file that we want to create
+ * @param permissions the permissions to set on the file (if security is enabled)
+ *
+ * @throws AlreadyExistsException if the file already exists
+ * @throws IOException if any other error occurred
+ */
+ public static FileOutputStream getCreateForWriteFileOutputStream(File f, int permissions)
+ throws IOException {
+ if (!Shell.WINDOWS) {
+ // Use the native wrapper around open(2)
+ try {
+ FileDescriptor fd = NativeIO.POSIX.open(f.getAbsolutePath(),
+ NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT
+ | NativeIO.POSIX.O_EXCL, permissions);
+ return new FileOutputStream(fd);
+ } catch (NativeIOException nioe) {
+ if (nioe.getErrno() == Errno.EEXIST) {
+ throw new AlreadyExistsException(nioe);
+ }
+ throw nioe;
+ }
} else {
- name = (domain == IdCache.USER) ? getUserName(id) : getGroupName(id);
- if (LOG.isDebugEnabled()) {
- String type = (domain == IdCache.USER) ? "UserName" : "GroupName";
- LOG.debug("Got " + type + " " + name + " for ID " + id +
- " from the native implementation");
+ // Use the Windows native APIs to create equivalent FileOutputStream
+ try {
+ FileDescriptor fd = NativeIO.Windows.createFile(f.getCanonicalPath(),
+ NativeIO.Windows.GENERIC_WRITE,
+ NativeIO.Windows.FILE_SHARE_DELETE
+ | NativeIO.Windows.FILE_SHARE_READ
+ | NativeIO.Windows.FILE_SHARE_WRITE,
+ NativeIO.Windows.CREATE_NEW);
+ NativeIO.POSIX.chmod(f.getCanonicalPath(), permissions);
+ return new FileOutputStream(fd);
+ } catch (NativeIOException nioe) {
+ if (nioe.getErrorCode() == 80) {
+ // ERROR_FILE_EXISTS
+ // 80 (0x50)
+ // The file exists
+ throw new AlreadyExistsException(nioe);
+ }
+ throw nioe;
}
- cachedName = new CachedName(name, now);
- idNameCache.put(id, cachedName);
}
- return name;
}
- /**
- * Returns the file stat for a file descriptor.
- *
- * @param fd file descriptor.
- * @return the file descriptor file stat.
- * @throws IOException thrown if there was an IO error while obtaining the file stat.
- */
- public static Stat getFstat(FileDescriptor fd) throws IOException {
- Stat stat = fstat(fd);
- stat.owner = getName(IdCache.USER, stat.ownerId);
- stat.group = getName(IdCache.GROUP, stat.groupId);
- return stat;
+ private synchronized static void ensureInitialized() {
+ if (!initialized) {
+ cacheTimeout =
+ new Configuration().getLong("hadoop.security.uid.cache.secs",
+ 4*60*60) * 1000;
+ LOG.info("Initialized cache for UID to User mapping with a cache" +
+ " timeout of " + cacheTimeout/1000 + " seconds.");
+ initialized = true;
+ }
}
/**
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIOException.java Wed Mar 6 19:15:18 2013
@@ -18,20 +18,40 @@
package org.apache.hadoop.io.nativeio;
import java.io.IOException;
+import org.apache.hadoop.util.Shell;
+
/**
* An exception generated by a call to the native IO code.
*
- * These exceptions simply wrap <i>errno</i> result codes.
+ * These exceptions simply wrap <i>errno</i> result codes on Linux,
+ * or the System Error Code on Windows.
*/
public class NativeIOException extends IOException {
private static final long serialVersionUID = 1L;
private Errno errno;
+ // Java has no unsigned primitive type. Use a signed 32-bit
+ // integer to hold the unsigned 32-bit Windows error code.
+ private int errorCode;
+
public NativeIOException(String msg, Errno errno) {
super(msg);
this.errno = errno;
+ // The Errno-based (POSIX) path carries no Windows failure, so the
+ // Windows error code defaults to ERROR_SUCCESS
+ this.errorCode = 0;
+ }
+
+ public NativeIOException(String msg, int errorCode) {
+ super(msg);
+ this.errorCode = errorCode;
+ this.errno = Errno.UNKNOWN;
+ }
+
+ public long getErrorCode() {
+ return errorCode;
}
public Errno getErrno() {
@@ -40,8 +60,10 @@ public class NativeIOException extends I
@Override
public String toString() {
- return errno.toString() + ": " + super.getMessage();
+ if (Shell.WINDOWS)
+ return errorCode + ": " + super.getMessage();
+ else
+ return errno.toString() + ": " + super.getMessage();
}
}
-
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics/MetricsServlet.java Wed Mar 6 19:15:18 2013
@@ -140,10 +140,12 @@ public class MetricsServlet extends Http
*/
void printMap(PrintWriter out, Map<String, Map<String, List<TagsMetricsPair>>> map) {
for (Map.Entry<String, Map<String, List<TagsMetricsPair>>> context : map.entrySet()) {
- out.println(context.getKey());
+ out.print(context.getKey());
+ out.print("\n");
for (Map.Entry<String, List<TagsMetricsPair>> record : context.getValue().entrySet()) {
indent(out, 1);
- out.println(record.getKey());
+ out.print(record.getKey());
+ out.print("\n");
for (TagsMetricsPair pair : record.getValue()) {
indent(out, 2);
// Prints tag values in the form "{key=value,key=value}:"
@@ -159,7 +161,7 @@ public class MetricsServlet extends Http
out.print("=");
out.print(tagValue.getValue().toString());
}
- out.println("}:");
+ out.print("}:\n");
// Now print metric values, one per line
for (Map.Entry<String, Number> metricValue :
@@ -167,7 +169,8 @@ public class MetricsServlet extends Http
indent(out, 3);
out.print(metricValue.getKey());
out.print("=");
- out.println(metricValue.getValue().toString());
+ out.print(metricValue.getValue().toString());
+ out.print("\n");
}
}
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Wed Mar 6 19:15:18 2013
@@ -86,7 +86,8 @@ public class ShellBasedUnixGroupsMapping
LOG.warn("got exception trying to get groups for user " + user, e);
}
- StringTokenizer tokenizer = new StringTokenizer(result);
+ StringTokenizer tokenizer =
+ new StringTokenizer(result, Shell.TOKEN_SEPARATOR_REGEX);
List<String> groups = new LinkedList<String>();
while (tokenizer.hasMoreTokens()) {
groups.add(tokenizer.nextToken());
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java Wed Mar 6 19:15:18 2013
@@ -60,7 +60,7 @@ class NativeCrc32 {
fileName, basePos);
}
- private static native void nativeVerifyChunkedSums(
+ private static native void nativeVerifyChunkedSums(
int bytesPerSum, int checksumType,
ByteBuffer sums, int sumsOffset,
ByteBuffer data, int dataOffset, int dataLength,
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PlatformName.java Wed Mar 6 19:15:18 2013
@@ -32,9 +32,10 @@ public class PlatformName {
* The complete platform 'name' to identify the platform as
* per the java-vm.
*/
- private static final String platformName = System.getProperty("os.name") + "-" +
- System.getProperty("os.arch") + "-" +
- System.getProperty("sun.arch.data.model");
+ private static final String platformName =
+ (Shell.WINDOWS ? System.getenv("os") : System.getProperty("os.name"))
+ + "-" + System.getProperty("os.arch")
+ + "-" + System.getProperty("sun.arch.data.model");
/**
* Get the complete platform as per the java-vm.
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Wed Mar 6 19:15:18 2013
@@ -21,6 +21,7 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.util.Arrays;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
@@ -44,46 +45,208 @@ abstract public class Shell {
public static final Log LOG = LogFactory.getLog(Shell.class);
+ private static boolean IS_JAVA7_OR_ABOVE =
+ System.getProperty("java.version").substring(0, 3).compareTo("1.7") >= 0;
+
+ public static boolean isJava7OrAbove() {
+ return IS_JAVA7_OR_ABOVE;
+ }
+
/** a Unix command to get the current user's name */
public final static String USER_NAME_COMMAND = "whoami";
+
+ /** Windows CreateProcess synchronization object */
+ public static final Object WindowsProcessLaunchLock = new Object();
+
/** a Unix command to get the current user's groups list */
public static String[] getGroupsCommand() {
- return new String[]{"bash", "-c", "groups"};
+ return (WINDOWS)? new String[]{"cmd", "/c", "groups"}
+ : new String[]{"bash", "-c", "groups"};
}
+
/** a Unix command to get a given user's groups list */
public static String[] getGroupsForUserCommand(final String user) {
//'groups username' command return is non-consistent across different unixes
- return new String [] {"bash", "-c", "id -Gn " + user};
+ return (WINDOWS)? new String[] { WINUTILS, "groups", "-F", "\"" + user + "\""}
+ : new String [] {"bash", "-c", "id -Gn " + user};
}
+
/** a Unix command to get a given netgroup's user list */
public static String[] getUsersForNetgroupCommand(final String netgroup) {
//'getent netgroup' command return is non-consistent across different unixes
- return new String [] {"bash", "-c", "getent netgroup " + netgroup};
+ return (WINDOWS)? new String [] {"cmd", "/c", "getent netgroup " + netgroup}
+ : new String [] {"bash", "-c", "getent netgroup " + netgroup};
+ }
+
+ /** Return a command to get permission information. */
+ public static String[] getGetPermissionCommand() {
+ return (WINDOWS) ? new String[] { WINUTILS, "ls", "-F" }
+ : new String[] { "/bin/ls", "-ld" };
+ }
+
+ /** Return a command to set permission */
+ public static String[] getSetPermissionCommand(String perm, boolean recursive) {
+ if (recursive) {
+ return (WINDOWS) ? new String[] { WINUTILS, "chmod", "-R", perm }
+ : new String[] { "chmod", "-R", perm };
+ } else {
+ return (WINDOWS) ? new String[] { WINUTILS, "chmod", perm }
+ : new String[] { "chmod", perm };
+ }
+ }
+
+ /**
+ * Return a command to set permission for specific file.
+ *
+ * @param perm String permission to set
+ * @param recursive boolean true to apply to all sub-directories recursively
+ * @param file String file to set
+ * @return String[] containing command and arguments
+ */
+ public static String[] getSetPermissionCommand(String perm, boolean recursive,
+ String file) {
+ String[] baseCmd = getSetPermissionCommand(perm, recursive);
+ String[] cmdWithFile = Arrays.copyOf(baseCmd, baseCmd.length + 1);
+ cmdWithFile[cmdWithFile.length - 1] = file;
+ return cmdWithFile;
+ }
+
+ /** Return a command to set owner */
+ public static String[] getSetOwnerCommand(String owner) {
+ return (WINDOWS) ? new String[] { WINUTILS, "chown", "\"" + owner + "\"" }
+ : new String[] { "chown", owner };
+ }
+
+ /** Return a command to create symbolic links */
+ public static String[] getSymlinkCommand(String target, String link) {
+ return WINDOWS ? new String[] { WINUTILS, "symlink", link, target }
+ : new String[] { "ln", "-s", target, link };
}
+
/** a Unix command to set permission */
public static final String SET_PERMISSION_COMMAND = "chmod";
/** a Unix command to set owner */
public static final String SET_OWNER_COMMAND = "chown";
+
+ /** a Unix command to set the change user's groups list */
public static final String SET_GROUP_COMMAND = "chgrp";
/** a Unix command to create a link */
public static final String LINK_COMMAND = "ln";
/** a Unix command to get a link target */
public static final String READ_LINK_COMMAND = "readlink";
- /** Return a Unix command to get permission information. */
- public static String[] getGET_PERMISSION_COMMAND() {
- //force /bin/ls, except on windows.
- return new String[] {(WINDOWS ? "ls" : "/bin/ls"), "-ld"};
- }
/**Time after which the executing script would be timedout*/
protected long timeOutInterval = 0L;
/** If or not script timed out*/
private AtomicBoolean timedOut;
+
+ /** Centralized logic to discover and validate the sanity of the Hadoop
+ * home directory. Returns either NULL or a directory that exists and
+ * was specified via either -Dhadoop.home.dir or the HADOOP_HOME ENV
+ * variable. This does a lot of work so it should only be called
+ * privately for initialization once per process.
+ **/
+ private static String checkHadoopHome() {
+
+ // first check the Dflag hadoop.home.dir with JVM scope
+ String home = System.getProperty("hadoop.home.dir");
+
+ // fall back to the system/user-global env variable
+ if (home == null) {
+ home = System.getenv("HADOOP_HOME");
+ }
+
+ try {
+ // couldn't find either setting for hadoop's home directory
+ if (home == null) {
+ throw new IOException("HADOOP_HOME or hadoop.home.dir are not set.");
+ }
+
+ if (home.startsWith("\"") && home.endsWith("\"")) {
+ home = home.substring(1, home.length()-1);
+ }
+
+ // check that the home setting is actually a directory that exists
+ File homedir = new File(home);
+ if (!homedir.isAbsolute() || !homedir.exists() || !homedir.isDirectory()) {
+ throw new IOException("Hadoop home directory " + homedir
+ + " does not exist, is not a directory, or is not an absolute path.");
+ }
+
+ home = homedir.getCanonicalPath();
+
+ } catch (IOException ioe) {
+ LOG.error("Failed to detect a valid hadoop home directory", ioe);
+ home = null;
+ }
+
+ return home;
+ }
+ private static String HADOOP_HOME_DIR = checkHadoopHome();
+
+ // Public getter, throws an exception if HADOOP_HOME failed validation
+ // checks and is being referenced downstream.
+ public static final String getHadoopHome() throws IOException {
+ if (HADOOP_HOME_DIR == null) {
+ throw new IOException("Misconfigured HADOOP_HOME cannot be referenced.");
+ }
+
+ return HADOOP_HOME_DIR;
+ }
+
+ /** fully qualify the path to a binary that should be in a known hadoop
+ * bin location. This is primarily useful for disambiguating call-outs
+ * to executable sub-components of Hadoop to avoid clashes with other
+ * executables that may be in the path. Caveat: this call doesn't
+ * just format the path to the bin directory. It also checks for file
+ * existence of the composed path. The output of this call should be
+ * cached by callers.
+ * */
+ public static final String getQualifiedBinPath(String executable)
+ throws IOException {
+ // construct hadoop bin path to the specified executable
+ String fullExeName = HADOOP_HOME_DIR + File.separator + "bin"
+ + File.separator + executable;
+
+ File exeFile = new File(fullExeName);
+ if (!exeFile.exists()) {
+ throw new IOException("Could not locate executable " + fullExeName
+ + " in the Hadoop binaries.");
+ }
+
+ return exeFile.getCanonicalPath();
+ }
+
/** Set to true on Windows platforms */
public static final boolean WINDOWS /* borrowed from Path.WINDOWS */
= System.getProperty("os.name").startsWith("Windows");
+
+ public static final boolean LINUX
+ = System.getProperty("os.name").startsWith("Linux");
+ /** a Windows utility to emulate Unix commands */
+ public static final String WINUTILS = getWinUtilsPath();
+
+ public static final String getWinUtilsPath() {
+ String winUtilsPath = null;
+
+ try {
+ if (WINDOWS) {
+ winUtilsPath = getQualifiedBinPath("winutils.exe");
+ }
+ } catch (IOException ioe) {
+ LOG.error("Failed to locate the winutils binary in the hadoop binary path",
+ ioe);
+ }
+
+ return winUtilsPath;
+ }
+
+ /** Token separator regex used to parse Shell tool outputs */
+ public static final String TOKEN_SEPARATOR_REGEX
+ = WINDOWS ? "[|\n\r]" : "[ \t\n\r\f]";
+
private long interval; // refresh interval in msec
private long lastTime; // last time the command was performed
private Map<String, String> environment; // env for the command execution
@@ -144,7 +307,19 @@ abstract public class Shell {
builder.directory(this.dir);
}
- process = builder.start();
+ if (Shell.WINDOWS) {
+ synchronized (WindowsProcessLaunchLock) {
+ // To workaround the race condition issue with child processes
+ // inheriting unintended handles during process launch that can
+ // lead to hangs on reading output and error streams, we
+ // serialize process creation. More info available at:
+ // http://support.microsoft.com/kb/315939
+ process = builder.start();
+ }
+ } else {
+ process = builder.start();
+ }
+
if (timeOutInterval > 0) {
timeOutTimer = new Timer("Shell command timeout");
timeoutTimerTask = new ShellTimeoutTimerTask(
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/StringUtils.java Wed Mar 6 19:15:18 2013
@@ -30,12 +30,16 @@ import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
import java.util.StringTokenizer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.util.Shell;
import com.google.common.net.InetAddresses;
@@ -52,6 +56,27 @@ public class StringUtils {
public static final int SHUTDOWN_HOOK_PRIORITY = 0;
/**
+ * Shell environment variables: $ followed by one letter or _ followed by
+ * multiple letters, numbers, or underscores. The group captures the
+ * environment variable name without the leading $.
+ */
+ public static final Pattern SHELL_ENV_VAR_PATTERN =
+ Pattern.compile("\\$([A-Za-z_]{1}[A-Za-z0-9_]*)");
+
+ /**
+ * Windows environment variables: surrounded by %. The group captures the
+ * environment variable name without the leading and trailing %.
+ */
+ public static final Pattern WIN_ENV_VAR_PATTERN = Pattern.compile("%(.*?)%");
+
+ /**
+ * Regular expression that matches and captures environment variable names
+ * according to platform-specific rules.
+ */
+ public static final Pattern ENV_VAR_PATTERN = Shell.WINDOWS ?
+ WIN_ENV_VAR_PATTERN : SHELL_ENV_VAR_PATTERN;
+
+ /**
* Make a string representation of the exception.
* @param e The exception to stringify
* @return A string with exception name and call stack.
@@ -792,6 +817,28 @@ public class StringUtils {
}
/**
+ * Concatenates strings, using a separator.
+ *
+ * @param separator to join with
+ * @param strings to join
+ * @return the joined string
+ */
+ public static String join(CharSequence separator, String[] strings) {
+ // Ideally we don't have to duplicate the code here if array is iterable.
+ StringBuilder sb = new StringBuilder();
+ boolean first = true;
+ for (String s : strings) {
+ if (first) {
+ first = false;
+ } else {
+ sb.append(separator);
+ }
+ sb.append(s);
+ }
+ return sb.toString();
+ }
+
+ /**
* Convert SOME_STUFF to SomeStuff
*
* @param s input string
@@ -806,4 +853,37 @@ public class StringUtils {
return sb.toString();
}
+
+ /**
+ * Matches a template string against a pattern, replaces matched tokens with
+ * the supplied replacements, and returns the result. The regular expression
+ * must use a capturing group. The value of the first capturing group is used
+ * to look up the replacement. If no replacement is found for the token, then
+ * it is replaced with the empty string.
+ *
+ * For example, assume template is "%foo%_%bar%_%baz%", pattern is "%(.*?)%",
+ * and replacements contains 2 entries, mapping "foo" to "zoo" and "baz" to
+ * "zaz". The result returned would be "zoo__zaz".
+ *
+ * @param template String template to receive replacements
+ * @param pattern Pattern to match for identifying tokens, must use a capturing
+ * group
+ * @param replacements Map<String, String> mapping tokens identified by the
+ * capturing group to their replacement values
+ * @return String template with replacements
+ */
+ public static String replaceTokens(String template, Pattern pattern,
+ Map<String, String> replacements) {
+ StringBuffer sb = new StringBuffer();
+ Matcher matcher = pattern.matcher(template);
+ while (matcher.find()) {
+ String replacement = replacements.get(matcher.group(1));
+ if (replacement == null) {
+ replacement = "";
+ }
+ matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement));
+ }
+ matcher.appendTail(sb);
+ return sb.toString();
+ }
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/overview.html
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/overview.html?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/overview.html (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/overview.html Wed Mar 6 19:15:18 2013
@@ -60,9 +60,7 @@ that process vast amounts of data. Here'
Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
</li>
<li>
- Win32 is supported as a <i>development</i> platform. Distributed operation
- has not been well tested on Win32, so this is not a <i>production</i>
- platform.
+ Windows is also a supported platform.
</li>
</ul>
@@ -84,15 +82,6 @@ that process vast amounts of data. Here'
</li>
</ol>
-<h4>Additional requirements for Windows</h4>
-
-<ol>
- <li>
- <a href="http://www.cygwin.com/">Cygwin</a> - Required for shell support in
- addition to the required software above.
- </li>
-</ol>
-
<h3>Installing Required Software</h3>
<p>If your platform does not have the required software listed above, you
@@ -104,13 +93,6 @@ $ sudo apt-get install ssh<br>
$ sudo apt-get install rsync<br>
</pre></blockquote></p>
-<p>On Windows, if you did not install the required software when you
-installed cygwin, start the cygwin installer and select the packages:</p>
-<ul>
- <li>openssh - the "Net" category</li>
- <li>rsync - the "Net" category</li>
-</ul>
-
<h2>Getting Started</h2>
<p>First, you need to get a copy of the Hadoop code.</p>