You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2014/08/21 07:22:14 UTC
svn commit: r1619293 - in /hadoop/common/branches/HDFS-6584: ./
hadoop-assemblies/src/main/resources/assemblies/ hadoop-project-dist/
hadoop-project/ hadoop-project/src/site/
hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/
hadoop-tool...
Author: szetszwo
Date: Thu Aug 21 05:22:10 2014
New Revision: 1619293
URL: http://svn.apache.org/r1619293
Log:
Merge r1609845 through r1619277 from trunk.
Added:
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithRawXAttrs.java
- copied unchanged from r1619277, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithRawXAttrs.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/DistCpTestUtils.java
- copied unchanged from r1619277, hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/DistCpTestUtils.java
Modified:
hadoop/common/branches/HDFS-6584/ (props changed)
hadoop/common/branches/HDFS-6584/BUILDING.txt
hadoop/common/branches/HDFS-6584/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
hadoop/common/branches/HDFS-6584/hadoop-project-dist/pom.xml
hadoop/common/branches/HDFS-6584/hadoop-project/pom.xml
hadoop/common/branches/HDFS-6584/hadoop-project/src/site/site.xml
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java
hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
Propchange: hadoop/common/branches/HDFS-6584/
------------------------------------------------------------------------------
Merged /hadoop/common/branches/fs-encryption:r1594376-1619194
Merged /hadoop/common/trunk:r1618764-1619277
Modified: hadoop/common/branches/HDFS-6584/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/BUILDING.txt?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-6584/BUILDING.txt Thu Aug 21 05:22:10 2014
@@ -81,6 +81,27 @@ Maven build goals:
the final tar file. This option requires that -Dsnappy.lib is also given,
and it ignores the -Dsnappy.prefix option.
+ OpenSSL build options:
+
+ OpenSSL includes a crypto library that can be utilized by the native code.
+ It is currently an optional component, meaning that Hadoop can be built with
+ or without this dependency.
+
+ * Use -Drequire.openssl to fail the build if libcrypto.so is not found.
+ If this option is not specified and the openssl library is missing,
+ we silently build a version of libhadoop.so that cannot make use of
+ openssl. This option is recommended if you plan on making use of openssl
+ and want to get more repeatable builds.
+ * Use -Dopenssl.prefix to specify a nonstandard location for the libcrypto
+ header files and library files. You do not need this option if you have
+ installed openssl using a package manager.
+ * Use -Dopenssl.lib to specify a nonstandard location for the libcrypto library
+ files. Similarly to openssl.prefix, you do not need this option if you have
+ installed openssl using a package manager.
+ * Use -Dbundle.openssl to copy the contents of the openssl.lib directory into
+ the final tar file. This option requires that -Dopenssl.lib is also given,
+ and it ignores the -Dopenssl.prefix option.
+
Tests options:
* Use -DskipTests to skip tests when running the following Maven goals:
Modified: hadoop/common/branches/HDFS-6584/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Thu Aug 21 05:22:10 2014
@@ -29,6 +29,7 @@
<exclude>*-config.cmd</exclude>
<exclude>start-*.cmd</exclude>
<exclude>stop-*.cmd</exclude>
+ <exclude>hadoop-layout.sh.example</exclude>
</excludes>
<fileMode>0755</fileMode>
</fileSet>
@@ -42,6 +43,8 @@
<includes>
<include>*-config.sh</include>
<include>*-config.cmd</include>
+ <include>*-functions.sh</include>
+ <include>hadoop-layout.sh.example</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -57,6 +60,10 @@
<exclude>hadoop.cmd</exclude>
<exclude>hdfs.cmd</exclude>
<exclude>hadoop-config.cmd</exclude>
+ <exclude>hadoop-functions.sh</exclude>
+ <exclude>hadoop-layout.sh.example</exclude>
+ <exclude>hdfs-config.cmd</exclude>
+ <exclude>hdfs-config.sh</exclude>
</excludes>
<fileMode>0755</fileMode>
</fileSet>
Modified: hadoop/common/branches/HDFS-6584/hadoop-project-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-project-dist/pom.xml?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-project-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-project-dist/pom.xml Thu Aug 21 05:22:10 2014
@@ -41,6 +41,8 @@
<hadoop.component>UNDEF</hadoop.component>
<bundle.snappy>false</bundle.snappy>
<bundle.snappy.in.bin>false</bundle.snappy.in.bin>
+ <bundle.openssl>false</bundle.openssl>
+ <bundle.openssl.in.bin>false</bundle.openssl.in.bin>
</properties>
<dependencies>
@@ -351,6 +353,10 @@
cd "${snappy.lib}"
$$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
fi
+ if [ "${bundle.openssl}" = "true" ] ; then
+ cd "${openssl.lib}"
+ $$TAR *crypto* | (cd $${TARGET_DIR}/; $$UNTAR)
+ fi
fi
BIN_DIR="${BUILD_DIR}/bin"
if [ -d $${BIN_DIR} ] ; then
@@ -364,6 +370,12 @@
$$TAR *snappy* | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
fi
fi
+ if [ "${bundle.openssl.in.bin}" = "true" ] ; then
+ if [ "${bundle.openssl}" = "true" ] ; then
+ cd "${openssl.lib}"
+ $$TAR *crypto* | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
+ fi
+ fi
fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
Modified: hadoop/common/branches/HDFS-6584/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-project/pom.xml?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-project/pom.xml Thu Aug 21 05:22:10 2014
@@ -1048,6 +1048,7 @@
<!-- attempt to open a file at this path. -->
<java.security.egd>file:/dev/urandom</java.security.egd>
<bundle.snappy.in.bin>true</bundle.snappy.in.bin>
+ <bundle.openssl.in.bin>true</bundle.openssl.in.bin>
</properties>
<build>
<plugins>
@@ -1058,6 +1059,7 @@
<environmentVariables>
<!-- Specify where to look for the native DLL on Windows -->
-            <PATH>${env.PATH};${hadoop.common.build.dir}/bin;${snappy.lib}</PATH>
+            <PATH>${env.PATH};${hadoop.common.build.dir}/bin;${snappy.lib};${openssl.lib}</PATH>
</environmentVariables>
</configuration>
</plugin>
Modified: hadoop/common/branches/HDFS-6584/hadoop-project/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-project/src/site/site.xml?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-project/src/site/site.xml Thu Aug 21 05:22:10 2014
@@ -90,6 +90,7 @@
<item name="HDFS NFS Gateway" href="hadoop-project-dist/hadoop-hdfs/HdfsNfsGateway.html"/>
<item name="HDFS Rolling Upgrade" href="hadoop-project-dist/hadoop-hdfs/HdfsRollingUpgrade.html"/>
<item name="Extended Attributes" href="hadoop-project-dist/hadoop-hdfs/ExtendedAttributes.html"/>
+ <item name="Transparent Encryption" href="hadoop-project-dist/hadoop-hdfs/TransparentEncryption.html"/>
<item name="HDFS Support for Multihoming" href="hadoop-project-dist/hadoop-hdfs/HdfsMultihoming.html"/>
</menu>
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java Thu Aug 21 05:22:10 2014
@@ -42,6 +42,8 @@ public class DistCpConstants {
public static final String CONF_LABEL_LOG_PATH = "distcp.log.path";
public static final String CONF_LABEL_IGNORE_FAILURES = "distcp.ignore.failures";
public static final String CONF_LABEL_PRESERVE_STATUS = "distcp.preserve.status";
+ public static final String CONF_LABEL_PRESERVE_RAWXATTRS =
+ "distcp.preserve.rawxattrs";
public static final String CONF_LABEL_SYNC_FOLDERS = "distcp.sync.folders";
public static final String CONF_LABEL_DELETE_MISSING = "distcp.delete.missing.source";
public static final String CONF_LABEL_SSL_CONF = "distcp.keystore.resource";
@@ -128,4 +130,8 @@ public class DistCpConstants {
public static final int MIN_RECORDS_PER_CHUNK_DEFAULT = 5;
public static final int SPLIT_RATIO_DEFAULT = 2;
+ /**
+ * Value of reserved raw HDFS directory when copying raw.* xattrs.
+ */
+ static final String HDFS_RESERVED_RAW_DIRECTORY_NAME = "/.reserved/raw";
}
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java Thu Aug 21 05:22:10 2014
@@ -48,7 +48,11 @@ public enum DistCpOptionSwitch {
new Option("p", true, "preserve status (rbugpcax)(replication, " +
"block-size, user, group, permission, checksum-type, ACL, XATTR). " +
"If -p is specified with no <arg>, then preserves replication, " +
- "block size, user, group, permission and checksum type.")),
+ "block size, user, group, permission and checksum type. " +
"raw.* xattrs are preserved when both the source and destination " +
"paths are in the /.reserved/raw hierarchy (HDFS only). raw.* xattr " +
"preservation is independent of the -p flag. " +
"Refer to the DistCp documentation for more details.")),
/**
* Update target location by copying only files that are missing
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptions.java Thu Aug 21 05:22:10 2014
@@ -52,6 +52,8 @@ public class DistCpOptions {
private EnumSet<FileAttribute> preserveStatus = EnumSet.noneOf(FileAttribute.class);
+ private boolean preserveRawXattrs;
+
private Path atomicWorkPath;
private Path logPath;
@@ -123,6 +125,7 @@ public class DistCpOptions {
this.sslConfigurationFile = that.getSslConfigurationFile();
this.copyStrategy = that.copyStrategy;
this.preserveStatus = that.preserveStatus;
+ this.preserveRawXattrs = that.preserveRawXattrs;
this.atomicWorkPath = that.getAtomicWorkPath();
this.logPath = that.getLogPath();
this.sourceFileListing = that.getSourceFileListing();
@@ -345,7 +348,7 @@ public class DistCpOptions {
}
/**
- * Checks if the input attibute should be preserved or not
+ * Checks if the input attribute should be preserved or not
*
* @param attribute - Attribute to check
* @return True if attribute should be preserved, false otherwise
@@ -369,6 +372,21 @@ public class DistCpOptions {
preserveStatus.add(fileAttribute);
}
+ /**
+ * Return true if raw.* xattrs should be preserved.
+ * @return true if raw.* xattrs should be preserved.
+ */
+ public boolean shouldPreserveRawXattrs() {
+ return preserveRawXattrs;
+ }
+
+ /**
+ * Indicate that raw.* xattrs should be preserved
+ */
+ public void preserveRawXattrs() {
+ preserveRawXattrs = true;
+ }
+
/** Get work path for atomic commit. If null, the work
* path would be parentOf(targetPath) + "/._WIP_" + nameOf(targetPath)
*
@@ -565,6 +583,7 @@ public class DistCpOptions {
", sourcePaths=" + sourcePaths +
", targetPath=" + targetPath +
", targetPathExists=" + targetPathExists +
+ ", preserveRawXattrs=" + preserveRawXattrs +
'}';
}
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/SimpleCopyListing.java Thu Aug 21 05:22:10 2014
@@ -37,6 +37,9 @@ import com.google.common.annotations.Vis
import java.io.*;
import java.util.Stack;
+import static org.apache.hadoop.tools.DistCpConstants
+ .HDFS_RESERVED_RAW_DIRECTORY_NAME;
+
/**
* The SimpleCopyListing is responsible for making the exhaustive list of
* all files/directories under its specified list of input-paths.
@@ -67,6 +70,10 @@ public class SimpleCopyListing extends C
Path targetPath = options.getTargetPath();
FileSystem targetFS = targetPath.getFileSystem(getConf());
boolean targetIsFile = targetFS.isFile(targetPath);
+ targetPath = targetFS.makeQualified(targetPath);
+ final boolean targetIsReservedRaw =
+ Path.getPathWithoutSchemeAndAuthority(targetPath).toString().
+ startsWith(HDFS_RESERVED_RAW_DIRECTORY_NAME);
//If target is a file, then source has to be single file
if (targetIsFile) {
@@ -93,6 +100,27 @@ public class SimpleCopyListing extends C
if (!fs.exists(path)) {
throw new InvalidInputException(path + " doesn't exist");
}
+ if (Path.getPathWithoutSchemeAndAuthority(path).toString().
+ startsWith(HDFS_RESERVED_RAW_DIRECTORY_NAME)) {
+ if (!targetIsReservedRaw) {
+ final String msg = "The source path '" + path + "' starts with " +
+ HDFS_RESERVED_RAW_DIRECTORY_NAME + " but the target path '" +
+ targetPath + "' does not. Either all or none of the paths must " +
+ "have this prefix.";
+ throw new InvalidInputException(msg);
+ }
+ } else if (targetIsReservedRaw) {
+ final String msg = "The target path '" + targetPath + "' starts with " +
+ HDFS_RESERVED_RAW_DIRECTORY_NAME + " but the source path '" +
+ path + "' does not. Either all or none of the paths must " +
+ "have this prefix.";
+ throw new InvalidInputException(msg);
+ }
+ }
+
+ if (targetIsReservedRaw) {
+ options.preserveRawXattrs();
+ getConf().setBoolean(DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, true);
}
/* This is requires to allow map tasks to access each of the source
@@ -135,6 +163,9 @@ public class SimpleCopyListing extends C
try {
for (Path path: options.getSourcePaths()) {
FileSystem sourceFS = path.getFileSystem(getConf());
+ final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
+ final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
+ final boolean preserveRawXAttrs = options.shouldPreserveRawXattrs();
path = makeQualified(path);
FileStatus rootStatus = sourceFS.getFileStatus(path);
@@ -145,8 +176,7 @@ public class SimpleCopyListing extends C
if (!explore || rootStatus.isDirectory()) {
CopyListingFileStatus rootCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, rootStatus,
- options.shouldPreserve(FileAttribute.ACL),
- options.shouldPreserve(FileAttribute.XATTR));
+ preserveAcls, preserveXAttrs, preserveRawXAttrs);
writeToFileListingRoot(fileListWriter, rootCopyListingStatus,
sourcePathRoot, options);
}
@@ -157,9 +187,9 @@ public class SimpleCopyListing extends C
}
CopyListingFileStatus sourceCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, sourceStatus,
- options.shouldPreserve(FileAttribute.ACL) &&
- sourceStatus.isDirectory(), options.shouldPreserve(
- FileAttribute.XATTR) && sourceStatus.isDirectory());
+ preserveAcls && sourceStatus.isDirectory(),
+ preserveXAttrs && sourceStatus.isDirectory(),
+ preserveRawXAttrs && sourceStatus.isDirectory());
writeToFileListing(fileListWriter, sourceCopyListingStatus,
sourcePathRoot, options);
@@ -261,6 +291,9 @@ public class SimpleCopyListing extends C
DistCpOptions options)
throws IOException {
FileSystem sourceFS = sourcePathRoot.getFileSystem(getConf());
+ final boolean preserveAcls = options.shouldPreserve(FileAttribute.ACL);
+ final boolean preserveXAttrs = options.shouldPreserve(FileAttribute.XATTR);
+ final boolean preserveRawXattrs = options.shouldPreserveRawXattrs();
Stack<FileStatus> pathStack = new Stack<FileStatus>();
pathStack.push(sourceStatus);
@@ -271,8 +304,9 @@ public class SimpleCopyListing extends C
+ sourceStatus.getPath() + " for copy.");
CopyListingFileStatus childCopyListingStatus =
DistCpUtils.toCopyListingFileStatus(sourceFS, child,
- options.shouldPreserve(FileAttribute.ACL) && child.isDirectory(),
- options.shouldPreserve(FileAttribute.XATTR) && child.isDirectory());
+ preserveAcls && child.isDirectory(),
+ preserveXAttrs && child.isDirectory(),
+ preserveRawXattrs && child.isDirectory());
writeToFileListing(fileListWriter, childCopyListingStatus,
sourcePathRoot, options);
if (isDirectoryAndNotEmpty(sourceFS, child)) {
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java Thu Aug 21 05:22:10 2014
@@ -83,7 +83,9 @@ public class CopyCommitter extends FileO
cleanupTempFiles(jobContext);
String attributes = conf.get(DistCpConstants.CONF_LABEL_PRESERVE_STATUS);
- if (attributes != null && !attributes.isEmpty()) {
+ final boolean preserveRawXattrs =
+ conf.getBoolean(DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, false);
+ if ((attributes != null && !attributes.isEmpty()) || preserveRawXattrs) {
preserveFileAttributesForDirectories(conf);
}
@@ -167,6 +169,8 @@ public class CopyCommitter extends FileO
LOG.info("About to preserve attributes: " + attrSymbols);
EnumSet<FileAttribute> attributes = DistCpUtils.unpackAttributes(attrSymbols);
+ final boolean preserveRawXattrs =
+ conf.getBoolean(DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, false);
Path sourceListing = new Path(conf.get(DistCpConstants.CONF_LABEL_LISTING_FILE_PATH));
FileSystem clusterFS = sourceListing.getFileSystem(conf);
@@ -194,7 +198,8 @@ public class CopyCommitter extends FileO
if (targetRoot.equals(targetFile) && syncOrOverwrite) continue;
FileSystem targetFS = targetFile.getFileSystem(conf);
- DistCpUtils.preserve(targetFS, targetFile, srcFileStatus, attributes);
+ DistCpUtils.preserve(targetFS, targetFile, srcFileStatus, attributes,
+ preserveRawXattrs);
taskAttemptContext.progress();
taskAttemptContext.setStatus("Preserving status on directory entries. [" +
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java Thu Aug 21 05:22:10 2014
@@ -200,6 +200,8 @@ public class CopyMapper extends Mapper<T
EnumSet<DistCpOptions.FileAttribute> fileAttributes
= getFileAttributeSettings(context);
+ final boolean preserveRawXattrs = context.getConfiguration().getBoolean(
+ DistCpConstants.CONF_LABEL_PRESERVE_RAWXATTRS, false);
final String description = "Copying " + sourcePath + " to " + target;
context.setStatus(description);
@@ -211,10 +213,12 @@ public class CopyMapper extends Mapper<T
FileSystem sourceFS;
try {
sourceFS = sourcePath.getFileSystem(conf);
+ final boolean preserveXAttrs =
+ fileAttributes.contains(FileAttribute.XATTR);
sourceCurrStatus = DistCpUtils.toCopyListingFileStatus(sourceFS,
sourceFS.getFileStatus(sourcePath),
fileAttributes.contains(FileAttribute.ACL),
- fileAttributes.contains(FileAttribute.XATTR));
+ preserveXAttrs, preserveRawXattrs);
} catch (FileNotFoundException e) {
throw new IOException(new RetriableFileCopyCommand.CopyReadException(e));
}
@@ -249,8 +253,8 @@ public class CopyMapper extends Mapper<T
action, fileAttributes);
}
- DistCpUtils.preserve(target.getFileSystem(conf), target,
- sourceCurrStatus, fileAttributes);
+ DistCpUtils.preserve(target.getFileSystem(conf), target, sourceCurrStatus,
+ fileAttributes, preserveRawXattrs);
} catch (IOException exception) {
handleFailures(exception, sourceFileStatus, target, context);
}
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java Thu Aug 21 05:22:10 2014
@@ -18,6 +18,7 @@
package org.apache.hadoop.tools.util;
+import com.google.common.collect.Maps;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -25,6 +26,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileChecksum;
+import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclUtil;
import org.apache.hadoop.fs.permission.FsPermission;
@@ -151,7 +153,7 @@ public class DistCpUtils {
* @return - String containing first letters of each attribute to preserve
*/
public static String packAttributes(EnumSet<FileAttribute> attributes) {
- StringBuffer buffer = new StringBuffer(5);
+ StringBuffer buffer = new StringBuffer(FileAttribute.values().length);
int len = 0;
for (FileAttribute attribute : attributes) {
buffer.append(attribute.name().charAt(0));
@@ -186,13 +188,15 @@ public class DistCpUtils {
* @param targetFS - File system
* @param path - Path that needs to preserve original file status
* @param srcFileStatus - Original file status
- * @param attributes - Attribute set that need to be preserved
+ * @param attributes - Attribute set that needs to be preserved
+ * @param preserveRawXattrs if true, raw.* xattrs should be preserved
* @throws IOException - Exception if any (particularly relating to group/owner
* change or any transient error)
*/
public static void preserve(FileSystem targetFS, Path path,
CopyListingFileStatus srcFileStatus,
- EnumSet<FileAttribute> attributes) throws IOException {
+ EnumSet<FileAttribute> attributes,
+ boolean preserveRawXattrs) throws IOException {
FileStatus targetFileStatus = targetFS.getFileStatus(path);
String group = targetFileStatus.getGroup();
@@ -214,15 +218,20 @@ public class DistCpUtils {
!srcFileStatus.getPermission().equals(targetFileStatus.getPermission())) {
targetFS.setPermission(path, srcFileStatus.getPermission());
}
-
- if (attributes.contains(FileAttribute.XATTR)) {
+
+ final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
+ if (preserveXAttrs || preserveRawXattrs) {
+ final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
- if (!srcXAttrs.equals(targetXAttrs)) {
+ if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
Iterator<Entry<String, byte[]>> iter = srcXAttrs.entrySet().iterator();
while (iter.hasNext()) {
Entry<String, byte[]> entry = iter.next();
- targetFS.setXAttr(path, entry.getKey(), entry.getValue());
+ final String xattrName = entry.getKey();
+ if (xattrName.startsWith(rawNS) || preserveXAttrs) {
+ targetFS.setXAttr(path, entry.getKey(), entry.getValue());
+ }
}
}
}
@@ -286,11 +295,12 @@ public class DistCpUtils {
* @param fileStatus FileStatus of file
* @param preserveAcls boolean true if preserving ACLs
* @param preserveXAttrs boolean true if preserving XAttrs
+ * @param preserveRawXAttrs boolean true if preserving raw.* XAttrs
* @throws IOException if there is an I/O error
*/
public static CopyListingFileStatus toCopyListingFileStatus(
FileSystem fileSystem, FileStatus fileStatus, boolean preserveAcls,
- boolean preserveXAttrs) throws IOException {
+ boolean preserveXAttrs, boolean preserveRawXAttrs) throws IOException {
CopyListingFileStatus copyListingFileStatus =
new CopyListingFileStatus(fileStatus);
if (preserveAcls) {
@@ -301,9 +311,25 @@ public class DistCpUtils {
copyListingFileStatus.setAclEntries(aclEntries);
}
}
- if (preserveXAttrs) {
- Map<String, byte[]> xAttrs = fileSystem.getXAttrs(fileStatus.getPath());
- copyListingFileStatus.setXAttrs(xAttrs);
+ if (preserveXAttrs || preserveRawXAttrs) {
+ Map<String, byte[]> srcXAttrs = fileSystem.getXAttrs(fileStatus.getPath());
+ if (preserveXAttrs && preserveRawXAttrs) {
+ copyListingFileStatus.setXAttrs(srcXAttrs);
+ } else {
+ Map<String, byte[]> trgXAttrs = Maps.newHashMap();
+ final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
+ for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
+ final String xattrName = ent.getKey();
+ if (xattrName.startsWith(rawNS)) {
+ if (preserveRawXAttrs) {
+ trgXAttrs.put(xattrName, ent.getValue());
+ }
+ } else if (preserveXAttrs) {
+ trgXAttrs.put(xattrName, ent.getValue());
+ }
+ }
+ copyListingFileStatus.setXAttrs(trgXAttrs);
+ }
}
return copyListingFileStatus;
}
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCpWithXAttrs.java Thu Aug 21 05:22:10 2014
@@ -18,13 +18,9 @@
package org.apache.hadoop.tools;
-import static org.junit.Assert.*;
-
import java.io.IOException;
import java.net.URI;
-import java.util.Iterator;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -37,8 +33,8 @@ import org.apache.hadoop.fs.permission.F
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.tools.util.DistCpTestUtils;
import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -79,6 +75,7 @@ public class TestDistCpWithXAttrs {
private static final Path dstFile2 = new Path(dstDir2, "file2");
private static final Path dstFile3 = new Path(dstDir2, "file3");
private static final Path dstFile4 = new Path(dstDir2, "file4");
+ private static final String rootedSrcName = "/src";
@BeforeClass
public static void init() throws Exception {
@@ -125,55 +122,56 @@ public class TestDistCpWithXAttrs {
@Test
public void testPreserveXAttrs() throws Exception {
- assertRunDistCp(DistCpConstants.SUCCESS, "/dstPreserveXAttrs");
+ DistCpTestUtils.assertRunDistCp(DistCpConstants.SUCCESS, rootedSrcName,
+ "/dstPreserveXAttrs", "-px", conf);
// dstDir1
Map<String, byte[]> xAttrs = Maps.newHashMap();
xAttrs.put(name1, value1);
xAttrs.put(name2, value2);
- assertXAttrs(dstDir1, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstDir1, fs, xAttrs);
// dstSubDir1
xAttrs.clear();
xAttrs.put(name1, value1);
xAttrs.put(name3, new byte[0]);
- assertXAttrs(dstSubDir1, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstSubDir1, fs, xAttrs);
// dstFile1
xAttrs.clear();
xAttrs.put(name1, value1);
xAttrs.put(name2, value2);
xAttrs.put(name3, new byte[0]);
- assertXAttrs(dstFile1, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstFile1, fs, xAttrs);
// dstDir2
xAttrs.clear();
xAttrs.put(name2, value2);
- assertXAttrs(dstDir2, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstDir2, fs, xAttrs);
// dstFile2
xAttrs.clear();
xAttrs.put(name1, value1);
xAttrs.put(name4, new byte[0]);
- assertXAttrs(dstFile2, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstFile2, fs, xAttrs);
// dstFile3
xAttrs.clear();
xAttrs.put(name3, new byte[0]);
xAttrs.put(name4, new byte[0]);
- assertXAttrs(dstFile3, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstFile3, fs, xAttrs);
// dstFile4
xAttrs.clear();
- assertXAttrs(dstFile4, xAttrs);
+ DistCpTestUtils.assertXAttrs(dstFile4, fs, xAttrs);
}
@Test
public void testXAttrsNotEnabled() throws Exception {
try {
restart(false);
- assertRunDistCp(DistCpConstants.XATTRS_NOT_SUPPORTED,
- "/dstXAttrsNotEnabled");
+ DistCpTestUtils.assertRunDistCp(DistCpConstants.XATTRS_NOT_SUPPORTED,
+ rootedSrcName, "/dstXAttrsNotEnabled", "-px", conf);
} finally {
restart(true);
}
@@ -181,8 +179,8 @@ public class TestDistCpWithXAttrs {
@Test
public void testXAttrsNotImplemented() throws Exception {
- assertRunDistCp(DistCpConstants.XATTRS_NOT_SUPPORTED,
- "stubfs://dstXAttrsNotImplemented");
+ DistCpTestUtils.assertRunDistCp(DistCpConstants.XATTRS_NOT_SUPPORTED,
+ rootedSrcName, "stubfs://dstXAttrsNotImplemented", "-px", conf);
}
/**
@@ -252,45 +250,6 @@ public class TestDistCpWithXAttrs {
}
/**
- * Asserts the XAttrs returned by getXAttrs for a specific path.
- *
- * @param path String path to check
- * @param xAttrs XAttr[] expected xAttrs
- * @throws Exception if there is any error
- */
- private static void assertXAttrs(Path path, Map<String, byte[]> expectedXAttrs)
- throws Exception {
- Map<String, byte[]> xAttrs = fs.getXAttrs(path);
- assertEquals(expectedXAttrs.size(), xAttrs.size());
- Iterator<Entry<String, byte[]>> i = expectedXAttrs.entrySet().iterator();
- while (i.hasNext()) {
- Entry<String, byte[]> e = i.next();
- String name = e.getKey();
- byte[] value = e.getValue();
- if (value == null) {
- assertTrue(xAttrs.containsKey(name) && xAttrs.get(name) == null);
- } else {
- assertArrayEquals(value, xAttrs.get(name));
- }
- }
- }
-
- /**
- * Runs distcp from /src to specified destination, preserving XAttrs. Asserts
- * expected exit code.
- *
- * @param int exitCode expected exit code
- * @param dst String distcp destination
- * @throws Exception if there is any error
- */
- private static void assertRunDistCp(int exitCode, String dst)
- throws Exception {
- DistCp distCp = new DistCp(conf, null);
- assertEquals(exitCode,
- ToolRunner.run(conf, distCp, new String[] { "-px", "/src", dst }));
- }
-
- /**
* Initialize the cluster, wait for it to become active, and get FileSystem.
*
* @param format if true, format the NameNode and DataNodes before starting up
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java Thu Aug 21 05:22:10 2014
@@ -357,7 +357,8 @@ public class TestOptionsParser {
DistCpOptions option = new DistCpOptions(new Path("abc"), new Path("xyz"));
String val = "DistCpOptions{atomicCommit=false, syncFolder=false, deleteMissing=false, " +
"ignoreFailures=false, maxMaps=20, sslConfigurationFile='null', copyStrategy='uniformsize', " +
- "sourceFileListing=abc, sourcePaths=null, targetPath=xyz, targetPathExists=true}";
+ "sourceFileListing=abc, sourcePaths=null, targetPath=xyz, targetPathExists=true, " +
+ "preserveRawXattrs=false}";
Assert.assertEquals(val, option.toString());
Assert.assertNotSame(DistCpOptionSwitch.ATOMIC_COMMIT.toString(),
DistCpOptionSwitch.ATOMIC_COMMIT.name());
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/util/TestDistCpUtils.java Thu Aug 21 05:22:10 2014
@@ -114,14 +114,14 @@ public class TestDistCpUtils {
fs.setPermission(path, noPerm);
fs.setOwner(path, "nobody", "nobody");
- DistCpUtils.preserve(fs, path, srcStatus, attributes);
+ DistCpUtils.preserve(fs, path, srcStatus, attributes, false);
FileStatus target = fs.getFileStatus(path);
Assert.assertEquals(target.getPermission(), noPerm);
Assert.assertEquals(target.getOwner(), "nobody");
Assert.assertEquals(target.getGroup(), "nobody");
attributes.add(FileAttribute.PERMISSION);
- DistCpUtils.preserve(fs, path, srcStatus, attributes);
+ DistCpUtils.preserve(fs, path, srcStatus, attributes, false);
target = fs.getFileStatus(path);
Assert.assertEquals(target.getPermission(), srcStatus.getPermission());
Assert.assertEquals(target.getOwner(), "nobody");
@@ -129,7 +129,7 @@ public class TestDistCpUtils {
attributes.add(FileAttribute.GROUP);
attributes.add(FileAttribute.USER);
- DistCpUtils.preserve(fs, path, srcStatus, attributes);
+ DistCpUtils.preserve(fs, path, srcStatus, attributes, false);
target = fs.getFileStatus(path);
Assert.assertEquals(target.getPermission(), srcStatus.getPermission());
Assert.assertEquals(target.getOwner(), srcStatus.getOwner());
Modified: hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java?rev=1619293&r1=1619292&r2=1619293&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java (original)
+++ hadoop/common/branches/HDFS-6584/hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/ResourceSchedulerWrapper.java Thu Aug 21 05:22:10 2014
@@ -36,8 +36,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
@@ -61,6 +61,7 @@ import org.apache.hadoop.yarn.exceptions
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
+import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.UpdatedContainerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
@@ -933,4 +934,10 @@ final public class ResourceSchedulerWrap
return new HashMap<ApplicationId,
SchedulerApplication<SchedulerApplicationAttempt>>();
}
+
+ @Override
+ protected void completedContainer(RMContainer rmContainer,
+ ContainerStatus containerStatus, RMContainerEventType event) {
+ // do nothing
+ }
}