Posted to common-commits@hadoop.apache.org by vi...@apache.org on 2013/10/30 23:22:14 UTC
svn commit: r1537330 - in /hadoop/common/branches/YARN-321: ./
hadoop-assemblies/src/main/resources/assemblies/ hadoop-client/
hadoop-project/ hadoop-project/src/site/ hadoop-tools/
hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/ ha...
Author: vinodkv
Date: Wed Oct 30 22:21:59 2013
New Revision: 1537330
URL: http://svn.apache.org/r1537330
Log:
Forwarding YARN-321 branch to latest branch-2.
svn merge ../branch-2
Added:
hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-sls.xml
- copied unchanged from r1537326, hadoop/common/branches/branch-2/hadoop-assemblies/src/main/resources/assemblies/hadoop-sls.xml
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-openstack/
- copied from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-openstack/
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-sls/
- copied from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-sls/
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextInputWriter.java
- copied unchanged from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextInputWriter.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java
- copied unchanged from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/KeyOnlyTextOutputReader.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputOnlyKeys.java
- copied unchanged from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingOutputOnlyKeys.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/io/
- copied from r1537326, hadoop/common/branches/branch-2/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/io/
Modified:
hadoop/common/branches/YARN-321/ (props changed)
hadoop/common/branches/YARN-321/.gitattributes
hadoop/common/branches/YARN-321/BUILDING.txt
hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
hadoop/common/branches/YARN-321/hadoop-client/pom.xml
hadoop/common/branches/YARN-321/hadoop-project/ (props changed)
hadoop/common/branches/YARN-321/hadoop-project/pom.xml (contents, props changed)
hadoop/common/branches/YARN-321/hadoop-project/src/site/ (props changed)
hadoop/common/branches/YARN-321/hadoop-project/src/site/site.xml
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/bin/cat.cmd (props changed)
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/bin/xargs_cat.cmd (props changed)
hadoop/common/branches/YARN-321/hadoop-tools/hadoop-tools-dist/pom.xml
hadoop/common/branches/YARN-321/hadoop-tools/pom.xml
hadoop/common/branches/YARN-321/pom.xml
Propchange: hadoop/common/branches/YARN-321/
------------------------------------------------------------------------------
Merged /hadoop/common/branches/branch-2:r1519784-1537326
Merged /hadoop/common/trunk:r1531125
Modified: hadoop/common/branches/YARN-321/.gitattributes
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/.gitattributes?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/.gitattributes (original)
+++ hadoop/common/branches/YARN-321/.gitattributes Wed Oct 30 22:21:59 2013
@@ -15,5 +15,6 @@
*.bat text eol=crlf
*.cmd text eol=crlf
+*.vcxproj text merge=union eol=crlf
*.csproj text merge=union eol=crlf
*.sln text merge=union eol=crlf
Modified: hadoop/common/branches/YARN-321/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/BUILDING.txt?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/BUILDING.txt (original)
+++ hadoop/common/branches/YARN-321/BUILDING.txt Wed Oct 30 22:21:59 2013
@@ -4,8 +4,8 @@ Build instructions for Hadoop
Requirements:
* Unix System
-* JDK 1.6
-* Maven 3.0
+* JDK 1.6+
+* Maven 3.0 or later
* Findbugs 1.3.9 (if running findbugs)
* ProtocolBuffer 2.5.0
* CMake 2.6 or newer (if compiling native code)
@@ -149,6 +149,34 @@ Create a local staging version of the we
----------------------------------------------------------------------------------
+Handling out of memory errors in builds
+
+----------------------------------------------------------------------------------
+
+If the build process fails with an out-of-memory error, you should be able to
+fix it by increasing the memory available to Maven, which can be done via the
+environment variable MAVEN_OPTS.
+
+Here is an example setting that allocates between 256 MB and 512 MB of heap
+space to Maven:
+
+export MAVEN_OPTS="-Xms256m -Xmx512m"
+
+----------------------------------------------------------------------------------
+
+Building on OS X
+
+----------------------------------------------------------------------------------
+
+A one-time manual step is required to enable building Hadoop on OS X with
+Java 7; it must be repeated every time the JDK is updated.
+See: https://issues.apache.org/jira/browse/HADOOP-9350
+
+$ sudo mkdir `/usr/libexec/java_home`/Classes
+$ sudo ln -s `/usr/libexec/java_home`/lib/tools.jar `/usr/libexec/java_home`/Classes/classes.jar
+
+----------------------------------------------------------------------------------
+
Building on Windows
----------------------------------------------------------------------------------
Modified: hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml Wed Oct 30 22:21:59 2013
@@ -93,6 +93,17 @@
<include>*-sources.jar</include>
</includes>
</fileSet>
+ <fileSet>
+ <directory>../hadoop-sls/target</directory>
+ <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
+ <includes>
+ <include>*-sources.jar</include>
+ </includes>
+ </fileSet>
+ <fileSet>
+ <directory>../hadoop-sls/target/hadoop-sls-${project.version}/sls</directory>
+ <outputDirectory>/share/hadoop/${hadoop.component}/sls</outputDirectory>
+ </fileSet>
</fileSets>
<dependencySets>
<dependencySet>
Modified: hadoop/common/branches/YARN-321/hadoop-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-client/pom.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-client/pom.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-client/pom.xml Wed Oct 30 22:21:59 2013
@@ -40,10 +40,6 @@
<scope>compile</scope>
<exclusions>
<exclusion>
- <groupId>commons-httpclient</groupId>
- <artifactId>commons-httpclient</artifactId>
- </exclusion>
- <exclusion>
<groupId>tomcat</groupId>
<artifactId>jasper-compiler</artifactId>
</exclusion>
Propchange: hadoop/common/branches/YARN-321/hadoop-project/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-project:r1531125
Merged /hadoop/common/branches/branch-2/hadoop-project:r1519784-1537326
Modified: hadoop/common/branches/YARN-321/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-project/pom.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-project/pom.xml Wed Oct 30 22:21:59 2013
@@ -59,6 +59,12 @@
<hadoop.common.build.dir>${basedir}/../../hadoop-common-project/hadoop-common/target</hadoop.common.build.dir>
<java.security.egd>file:///dev/urandom</java.security.egd>
+ <!-- avro version -->
+ <avro.version>1.7.4</avro.version>
+
+ <!-- jersey version -->
+ <jersey.version>1.9</jersey.version>
+
<!-- ProtocolBuffer version, used to verify the protoc version and -->
<!-- define the protobuf JAR version -->
<protobuf.version>2.5.0</protobuf.version>
@@ -300,6 +306,12 @@
</dependency>
<dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-openstack</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>11.0.2</version>
@@ -315,6 +327,11 @@
<version>2.1</version>
</dependency>
<dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-compress</artifactId>
+ <version>1.4.1</version>
+ </dependency>
+ <dependency>
<groupId>xmlenc</groupId>
<artifactId>xmlenc</artifactId>
<version>0.52</version>
@@ -325,6 +342,11 @@
<version>3.1</version>
</dependency>
<dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.2.5</version>
+ </dependency>
+ <dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.4</version>
@@ -357,6 +379,23 @@
</dependency>
<dependency>
+ <groupId>org.glassfish</groupId>
+ <artifactId>javax.servlet</artifactId>
+ <version>3.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-utils</artifactId>
+ <version>2.0.5</version>
+ </dependency>
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-component-annotations</artifactId>
+ <version>1.5.5</version>
+ </dependency>
+
+ <dependency>
<groupId>asm</groupId>
<artifactId>asm</artifactId>
<version>3.2</version>
@@ -364,12 +403,12 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
<exclusions>
<exclusion>
<groupId>javax.xml.stream</groupId>
@@ -380,7 +419,7 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-server</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
@@ -398,25 +437,25 @@
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-guice</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-core</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.sun.jersey.jersey-test-framework</groupId>
<artifactId>jersey-test-framework-grizzly2</artifactId>
- <version>1.8</version>
+ <version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty</artifactId>
- <version>3.5.11.Final</version>
+ <version>3.6.2.Final</version>
</dependency>
<dependency>
@@ -604,7 +643,7 @@
<dependency>
<groupId>org.apache.avro</groupId>
<artifactId>avro</artifactId>
- <version>1.7.4</version>
+ <version>${avro.version}</version>
</dependency>
<dependency>
<groupId>net.sf.kosmosfs</groupId>
@@ -645,7 +684,7 @@
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
- <version>3.4.2</version>
+ <version>3.4.5</version>
<exclusions>
<exclusion>
<!-- otherwise seems to drag in junit 3.8.1 via jline -->
@@ -660,14 +699,24 @@
<groupId>com.sun.jmx</groupId>
<artifactId>jmxri</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.jboss.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
- <version>3.4.2</version>
+ <version>3.4.5</version>
<type>test-jar</type>
<scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.jboss.netty</groupId>
+ <artifactId>netty</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>org.apache.bookkeeper</groupId>
@@ -680,6 +729,16 @@
<artifactId>hsqldb</artifactId>
<version>2.0.0</version>
</dependency>
+ <dependency>
+ <groupId>com.codahale.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>3.0.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-sls</artifactId>
+ <version>${project.version}</version>
+ </dependency>
</dependencies>
</dependencyManagement>
@@ -702,7 +761,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
- <version>2.1</version>
+ <version>2.2</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
@@ -712,7 +771,7 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
- <version>2.12.3</version>
+ <version>2.16</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -762,7 +821,7 @@
<plugin>
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
- <version>1.5.3</version>
+ <version>${avro.version}</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo.jspc</groupId>
@@ -864,7 +923,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
- <forkMode>always</forkMode>
+ <reuseForks>false</reuseForks>
<forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
<argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
<environmentVariables>
@@ -908,6 +967,26 @@
<includeReports>false</includeReports>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>depcheck</id>
+ <configuration>
+ <rules>
+ <DependencyConvergence>
+ <uniqueVersions>true</uniqueVersions>
+ </DependencyConvergence>
+ </rules>
+ </configuration>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <phase>verify</phase>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
@@ -1039,23 +1118,5 @@
</plugins>
</build>
</profile>
- <!-- Copied into specific modules supporting parallel testing. Will be uncommented as soon as all modules support this.
- <profile>
- <id>parallel-tests</id>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <configuration>
- <forkMode>perthread</forkMode>
- <threadCount>${testsThreadCount}</threadCount>
- <parallel>classes</parallel>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
- -->
</profiles>
</project>
Propchange: hadoop/common/branches/YARN-321/hadoop-project/pom.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-project/pom.xml:r1531125
Merged /hadoop/common/branches/branch-2/hadoop-project/pom.xml:r1519784-1537326
Propchange: hadoop/common/branches/YARN-321/hadoop-project/src/site/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-project/src/site:r1531125
Merged /hadoop/common/branches/branch-2/hadoop-project/src/site:r1503799-1537326
Modified: hadoop/common/branches/YARN-321/hadoop-project/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-project/src/site/site.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-project/src/site/site.xml Wed Oct 30 22:21:59 2013
@@ -80,12 +80,14 @@
<item name="HttpFS Gateway" href="hadoop-hdfs-httpfs/index.html"/>
<item name="Short Circuit Local Reads"
href="hadoop-project-dist/hadoop-hdfs/ShortCircuitLocalReads.html"/>
+ <item name="HDFS NFS Gateway" href="hadoop-project-dist/hadoop-hdfs/HdfsNfsGateway.html"/>
</menu>
<menu name="MapReduce" inherit="top">
<item name="Compatibilty between Hadoop 1.x and Hadoop 2.x" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduce_Compatibility_Hadoop1_Hadoop2.html"/>
<item name="Encrypted Shuffle" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/EncryptedShuffle.html"/>
<item name="Pluggable Shuffle/Sort" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/PluggableShuffleAndPluggableSort.html"/>
+ <item name="Distributed Cache Deploy" href="hadoop-mapreduce-client/hadoop-mapreduce-client-core/DistributedCacheDeploy.html"/>
</menu>
<menu name="YARN" inherit="top">
@@ -95,6 +97,7 @@
<item name="Fair Scheduler" href="hadoop-yarn/hadoop-yarn-site/FairScheduler.html"/>
<item name="Web Application Proxy" href="hadoop-yarn/hadoop-yarn-site/WebApplicationProxy.html"/>
<item name="YARN Commands" href="hadoop-yarn/hadoop-yarn-site/YarnCommands.html"/>
+ <item name="Scheduler Load Simulator" href="hadoop-sls/SchedulerLoadSimulator.html"/>
</menu>
<menu name="YARN REST APIs" inherit="top">
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java Wed Oct 30 22:21:59 2013
@@ -19,6 +19,7 @@
package org.apache.hadoop.tools;
import java.io.ByteArrayOutputStream;
+import java.io.FilterInputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
@@ -30,9 +31,13 @@ import java.util.StringTokenizer;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.fs.HarFileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.IOUtils;
@@ -42,6 +47,7 @@ import org.apache.hadoop.yarn.server.res
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
+import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
@@ -62,19 +68,36 @@ public class TestHadoopArchives {
private static final String inputDir = "input";
private Path inputPath;
+ private Path archivePath;
+ private final List<String> fileList = new ArrayList<String>();
private MiniDFSCluster dfscluster;
private Configuration conf;
private FileSystem fs;
- private Path archivePath;
- static private Path createFile(Path dir, String filename, FileSystem fs)
- throws IOException {
- final Path f = new Path(dir, filename);
+ private static String createFile(Path root, FileSystem fs, String... dirsAndFile
+ ) throws IOException {
+ String fileBaseName = dirsAndFile[dirsAndFile.length - 1];
+ return createFile(root, fs, fileBaseName.getBytes("UTF-8"), dirsAndFile);
+ }
+
+ private static String createFile(Path root, FileSystem fs, byte[] fileContent, String... dirsAndFile
+ ) throws IOException {
+ StringBuilder sb = new StringBuilder();
+ for (String segment: dirsAndFile) {
+ if (sb.length() > 0) {
+ sb.append(Path.SEPARATOR);
+ }
+ sb.append(segment);
+ }
+ final Path f = new Path(root, sb.toString());
final FSDataOutputStream out = fs.create(f);
- out.write(filename.getBytes());
- out.close();
- return f;
+ try {
+ out.write(fileContent);
+ } finally {
+ out.close();
+ }
+ return sb.toString();
}
@Before
@@ -86,102 +109,80 @@ public class TestHadoopArchives {
conf.set(CapacitySchedulerConfiguration.PREFIX
+ CapacitySchedulerConfiguration.ROOT + ".default."
+ CapacitySchedulerConfiguration.CAPACITY, "100");
- dfscluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).format(true)
- .build();
+ dfscluster = new MiniDFSCluster
+ .Builder(conf)
+ .checkExitOnShutdown(true)
+ .numDataNodes(2)
+ .format(true)
+ .racks(null)
+ .build();
fs = dfscluster.getFileSystem();
- inputPath = new Path(fs.getHomeDirectory(), inputDir);
+
+ // prepare archive path:
archivePath = new Path(fs.getHomeDirectory(), "archive");
+ fs.delete(archivePath, true);
+
+ // prepare input path:
+ inputPath = new Path(fs.getHomeDirectory(), inputDir);
+ fs.delete(inputPath, true);
fs.mkdirs(inputPath);
- createFile(inputPath, "a", fs);
- createFile(inputPath, "b", fs);
- createFile(inputPath, "c", fs);
+ // create basic input files:
+ fileList.add(createFile(inputPath, fs, "a"));
+ fileList.add(createFile(inputPath, fs, "b"));
+ fileList.add(createFile(inputPath, fs, "c"));
}
@After
public void tearDown() throws Exception {
- try {
- if (dfscluster != null) {
- dfscluster.shutdown();
- }
- if (dfscluster != null) {
- dfscluster.shutdown();
- }
- } catch (Exception e) {
- System.err.println(e);
+ if (dfscluster != null) {
+ dfscluster.shutdown();
}
}
@Test
public void testRelativePath() throws Exception {
- fs.delete(archivePath, true);
-
final Path sub1 = new Path(inputPath, "dir1");
fs.mkdirs(sub1);
- createFile(sub1, "a", fs);
+ createFile(inputPath, fs, sub1.getName(), "a");
final FsShell shell = new FsShell(conf);
final List<String> originalPaths = lsr(shell, "input");
- System.out.println("originalPath: " + originalPaths);
- final URI uri = fs.getUri();
- final String prefix = "har://hdfs-" + uri.getHost() + ":" + uri.getPort()
- + archivePath.toUri().getPath() + Path.SEPARATOR;
+ System.out.println("originalPaths: " + originalPaths);
- {
- final String harName = "foo.har";
- final String[] args = { "-archiveName", harName, "-p", "input", "*",
- "archive" };
- System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
- HADOOP_ARCHIVES_JAR);
- final HadoopArchives har = new HadoopArchives(conf);
- Assert.assertEquals(0, ToolRunner.run(har, args));
-
- // compare results
- final List<String> harPaths = lsr(shell, prefix + harName);
- Assert.assertEquals(originalPaths, harPaths);
- }
+ // make the archive:
+ final String fullHarPathStr = makeArchive();
+
+ // compare results:
+ final List<String> harPaths = lsr(shell, fullHarPathStr);
+ Assert.assertEquals(originalPaths, harPaths);
}
@Test
public void testPathWithSpaces() throws Exception {
- fs.delete(archivePath, true);
-
// create files/directories with spaces
- createFile(inputPath, "c c", fs);
+ createFile(inputPath, fs, "c c");
final Path sub1 = new Path(inputPath, "sub 1");
fs.mkdirs(sub1);
- createFile(sub1, "file x y z", fs);
- createFile(sub1, "file", fs);
- createFile(sub1, "x", fs);
- createFile(sub1, "y", fs);
- createFile(sub1, "z", fs);
+ createFile(sub1, fs, "file x y z");
+ createFile(sub1, fs, "file");
+ createFile(sub1, fs, "x");
+ createFile(sub1, fs, "y");
+ createFile(sub1, fs, "z");
final Path sub2 = new Path(inputPath, "sub 1 with suffix");
fs.mkdirs(sub2);
- createFile(sub2, "z", fs);
+ createFile(sub2, fs, "z");
final FsShell shell = new FsShell(conf);
-
final String inputPathStr = inputPath.toUri().getPath();
-
final List<String> originalPaths = lsr(shell, inputPathStr);
- final URI uri = fs.getUri();
- final String prefix = "har://hdfs-" + uri.getHost() + ":" + uri.getPort()
- + archivePath.toUri().getPath() + Path.SEPARATOR;
- {// Enable space replacement
- final String harName = "foo.har";
- final String[] args = { "-archiveName", harName, "-p", inputPathStr, "*",
- archivePath.toString() };
- System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
- HADOOP_ARCHIVES_JAR);
- final HadoopArchives har = new HadoopArchives(conf);
- Assert.assertEquals(0, ToolRunner.run(har, args));
-
- // compare results
- final List<String> harPaths = lsr(shell, prefix + harName);
- Assert.assertEquals(originalPaths, harPaths);
- }
+ // make the archive:
+ final String fullHarPathStr = makeArchive();
+ // compare results
+ final List<String> harPaths = lsr(shell, fullHarPathStr);
+ Assert.assertEquals(originalPaths, harPaths);
}
private static List<String> lsr(final FsShell shell, String dir)
@@ -222,4 +223,442 @@ public class TestHadoopArchives {
.println("lsr paths = " + paths.toString().replace(", ", ",\n "));
return paths;
}
+
+ @Test
+ public void testReadFileContent() throws Exception {
+ fileList.add(createFile(inputPath, fs, "c c"));
+ final Path sub1 = new Path(inputPath, "sub 1");
+ fs.mkdirs(sub1);
+ fileList.add(createFile(inputPath, fs, sub1.getName(), "file x y z"));
+ fileList.add(createFile(inputPath, fs, sub1.getName(), "file"));
+ fileList.add(createFile(inputPath, fs, sub1.getName(), "x"));
+ fileList.add(createFile(inputPath, fs, sub1.getName(), "y"));
+ fileList.add(createFile(inputPath, fs, sub1.getName(), "z"));
+ final Path sub2 = new Path(inputPath, "sub 1 with suffix");
+ fs.mkdirs(sub2);
+ fileList.add(createFile(inputPath, fs, sub2.getName(), "z"));
+ // Generate big binary file content:
+ final byte[] binContent = prepareBin();
+ fileList.add(createFile(inputPath, fs, binContent, sub2.getName(), "bin"));
+ fileList.add(createFile(inputPath, fs, new byte[0], sub2.getName(), "zero-length"));
+
+ final String fullHarPathStr = makeArchive();
+
+ // Create fresh HarFs:
+ final HarFileSystem harFileSystem = new HarFileSystem(fs);
+ try {
+ final URI harUri = new URI(fullHarPathStr);
+ harFileSystem.initialize(harUri, fs.getConf());
+ // now read the file content and compare it against the expected:
+ int readFileCount = 0;
+ for (final String pathStr0 : fileList) {
+ final Path path = new Path(fullHarPathStr + Path.SEPARATOR + pathStr0);
+ final String baseName = path.getName();
+ final FileStatus status = harFileSystem.getFileStatus(path);
+ if (status.isFile()) {
+ // read the file:
+ final byte[] actualContentSimple = readAllSimple(
+ harFileSystem.open(path), true);
+
+ final byte[] actualContentBuffer = readAllWithBuffer(
+ harFileSystem.open(path), true);
+ assertArrayEquals(actualContentSimple, actualContentBuffer);
+
+ final byte[] actualContentFully = readAllWithReadFully(
+ actualContentSimple.length,
+ harFileSystem.open(path), true);
+ assertArrayEquals(actualContentSimple, actualContentFully);
+
+ final byte[] actualContentSeek = readAllWithSeek(
+ actualContentSimple.length,
+ harFileSystem.open(path), true);
+ assertArrayEquals(actualContentSimple, actualContentSeek);
+
+ final byte[] actualContentRead4
+ = readAllWithRead4(harFileSystem.open(path), true);
+ assertArrayEquals(actualContentSimple, actualContentRead4);
+
+ final byte[] actualContentSkip = readAllWithSkip(
+ actualContentSimple.length,
+ harFileSystem.open(path),
+ harFileSystem.open(path),
+ true);
+ assertArrayEquals(actualContentSimple, actualContentSkip);
+
+ if ("bin".equals(baseName)) {
+ assertArrayEquals(binContent, actualContentSimple);
+ } else if ("zero-length".equals(baseName)) {
+ assertEquals(0, actualContentSimple.length);
+ } else {
+ String actual = new String(actualContentSimple, "UTF-8");
+ assertEquals(baseName, actual);
+ }
+ readFileCount++;
+ }
+ }
+ assertEquals(fileList.size(), readFileCount);
+ } finally {
+ harFileSystem.close();
+ }
+ }
+
+ private static byte[] readAllSimple(FSDataInputStream fsdis, boolean close) throws IOException {
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ try {
+ int b;
+ while (true) {
+ b = fsdis.read();
+ if (b < 0) {
+ break;
+ } else {
+ baos.write(b);
+ }
+ }
+ baos.close();
+ return baos.toByteArray();
+ } finally {
+ if (close) {
+ fsdis.close();
+ }
+ }
+ }
+
+ private static byte[] readAllWithBuffer(FSDataInputStream fsdis, boolean close)
+ throws IOException {
+ try {
+ final int available = fsdis.available();
+ final byte[] buffer;
+ final ByteArrayOutputStream baos;
+ if (available < 0) {
+ buffer = new byte[1024];
+ baos = new ByteArrayOutputStream(buffer.length * 2);
+ } else {
+ buffer = new byte[available];
+ baos = new ByteArrayOutputStream(available);
+ }
+ int readIntoBuffer = 0;
+ int read;
+ while (true) {
+ read = fsdis.read(buffer, readIntoBuffer, buffer.length - readIntoBuffer);
+ if (read < 0) {
+ // end of stream:
+ if (readIntoBuffer > 0) {
+ baos.write(buffer, 0, readIntoBuffer);
+ }
+ return baos.toByteArray();
+ } else {
+ readIntoBuffer += read;
+ if (readIntoBuffer == buffer.length) {
+ // buffer is full, need to clean the buffer.
+ // drop the buffered data to baos:
+ baos.write(buffer);
+ // reset the counter to start reading to the buffer beginning:
+ readIntoBuffer = 0;
+ } else if (readIntoBuffer > buffer.length) {
+ throw new IOException("Read more than the buffer length: "
+ + readIntoBuffer + ", buffer length = " + buffer.length);
+ }
+ }
+ }
+ } finally {
+ if (close) {
+ fsdis.close();
+ }
+ }
+ }
+
+ private static byte[] readAllWithReadFully(int totalLength, FSDataInputStream fsdis, boolean close)
+ throws IOException {
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ // Simulate reading of some data structures of known length:
+ final byte[] buffer = new byte[17];
+ final int times = totalLength / buffer.length;
+ final int remainder = totalLength % buffer.length;
+ // it would be simpler to leave the position tracking to the
+ // InputStream, but we need to check the methods #readFully(2)
+ // and #readFully(4) that receive the position as a parameter:
+ int position = 0;
+ try {
+ // read "data structures":
+ for (int i=0; i<times; i++) {
+ fsdis.readFully(position, buffer);
+ position += buffer.length;
+ baos.write(buffer);
+ }
+ if (remainder > 0) {
+ // read the remainder:
+ fsdis.readFully(position, buffer, 0, remainder);
+ position += remainder;
+ baos.write(buffer, 0, remainder);
+ }
+ try {
+ fsdis.readFully(position, buffer, 0, 1);
+ assertTrue(false);
+ } catch (IOException ioe) {
+ // okay
+ }
+ assertEquals(totalLength, position);
+ final byte[] result = baos.toByteArray();
+ assertEquals(totalLength, result.length);
+ return result;
+ } finally {
+ if (close) {
+ fsdis.close();
+ }
+ }
+ }
+
+ private static byte[] readAllWithRead4(FSDataInputStream fsdis, boolean close)
+ throws IOException {
+ try {
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ final byte[] buffer = new byte[17];
+ int totalRead = 0;
+ int read;
+ while (true) {
+ read = fsdis.read(totalRead, buffer, 0, buffer.length);
+ if (read > 0) {
+ totalRead += read;
+ baos.write(buffer, 0, read);
+ } else if (read < 0) {
+ break; // EOF
+ } else {
+ // read == 0:
+ // a zero result may be returned *only* if the 4th
+ // parameter is 0. Since in our case it is 'buffer.length',
+ // zero return value clearly indicates a bug:
+ throw new AssertionError("FSDataInputStream#read(4) returned 0, while " +
+ " the 4th method parameter is " + buffer.length + ".");
+ }
+ }
+ final byte[] result = baos.toByteArray();
+ return result;
+ } finally {
+ if (close) {
+ fsdis.close();
+ }
+ }
+ }
+
+ private static byte[] readAllWithSeek(final int totalLength,
+ final FSDataInputStream fsdis, final boolean close)
+ throws IOException {
+ final byte[] result = new byte[totalLength];
+ long pos;
+ try {
+ // read the data in the reverse order, from
+ // the tail to the head by pieces of 'buffer' length:
+ final byte[] buffer = new byte[17];
+ final int times = totalLength / buffer.length;
+ int read;
+ int expectedRead;
+ for (int i=times; i>=0; i--) {
+ pos = i * buffer.length;
+ fsdis.seek(pos);
+ // check that seek is successful:
+ assertEquals(pos, fsdis.getPos());
+ read = fsdis.read(buffer);
+ // check we read right number of bytes:
+ if (i == times) {
+ expectedRead = totalLength % buffer.length; // remainder
+ if (expectedRead == 0) {
+ // zero remainder corresponds to the EOS, so
+ // by the contract of DataInputStream#read(byte[]) -1 should be
+ // returned:
+ expectedRead = -1;
+ }
+ } else {
+ expectedRead = buffer.length;
+ }
+ assertEquals(expectedRead, read);
+ if (read > 0) {
+ System.arraycopy(buffer, 0, result, (int)pos, read);
+ }
+ }
+
+ // finally, check that #seek() to a non-existing position leads to IOE:
+ expectSeekIOE(fsdis, Long.MAX_VALUE, "Seek to Long.MAX_VALUE should lead to IOE.");
+ expectSeekIOE(fsdis, Long.MIN_VALUE, "Seek to Long.MIN_VALUE should lead to IOE.");
+ long pp = -1L;
+ expectSeekIOE(fsdis, pp, "Seek to "+pp+" should lead to IOE.");
+
+ // NB: it is *possible* to #seek(length), but *impossible* to #seek(length + 1):
+ fsdis.seek(totalLength);
+ assertEquals(totalLength, fsdis.getPos());
+ pp = totalLength + 1;
+ expectSeekIOE(fsdis, pp, "Seek to the length position + 1 ("+pp+") should lead to IOE.");
+
+ return result;
+ } finally {
+ if (close) {
+ fsdis.close();
+ }
+ }
+ }
+
+ private static void expectSeekIOE(FSDataInputStream fsdis, long seekPos, String message) {
+ try {
+ fsdis.seek(seekPos);
+ assertTrue(message + " (Position = " + fsdis.getPos() + ")", false);
+ } catch (IOException ioe) {
+ // okay
+ }
+ }
+
+ /*
+ * Reads data in chunks from 2 input streams:
+ * reads a chunk from stream 1 and skips that chunk in stream 2;
+ * then reads the next chunk from stream 2 and skips it in stream 1.
+ */
+ private static byte[] readAllWithSkip(
+ final int totalLength,
+ final FSDataInputStream fsdis1,
+ final FSDataInputStream fsdis2,
+ final boolean close)
+ throws IOException {
+ // test negative skip arg:
+ assertEquals(0, fsdis1.skip(-1));
+ // test zero skip arg:
+ assertEquals(0, fsdis1.skip(0));
+
+ final ByteArrayOutputStream baos = new ByteArrayOutputStream(totalLength);
+ try {
+ // read the data in chunks of 'buffer' length, alternating
+ // the reading and skipping roles of the two streams:
+ final byte[] buffer = new byte[17];
+ final int times = totalLength / buffer.length;
+ final int remainder = totalLength % buffer.length;
+ long skipped;
+ long expectedPosition;
+ int toGo;
+ for (int i=0; i<=times; i++) {
+ toGo = (i < times) ? buffer.length : remainder;
+ if (i % 2 == 0) {
+ fsdis1.readFully(buffer, 0, toGo);
+ skipped = skipUntilZero(fsdis2, toGo);
+ } else {
+ fsdis2.readFully(buffer, 0, toGo);
+ skipped = skipUntilZero(fsdis1, toGo);
+ }
+ if (i < times) {
+ assertEquals(buffer.length, skipped);
+ expectedPosition = (i + 1) * buffer.length;
+ } else {
+ // remainder:
+ if (remainder > 0) {
+ assertEquals(remainder, skipped);
+ } else {
+ assertEquals(0, skipped);
+ }
+ expectedPosition = totalLength;
+ }
+ // check if the 2 streams have equal and correct positions:
+ assertEquals(expectedPosition, fsdis1.getPos());
+ assertEquals(expectedPosition, fsdis2.getPos());
+ // save the read data:
+ if (toGo > 0) {
+ baos.write(buffer, 0, toGo);
+ }
+ }
+
+ // finally, check that a stream at EOF cannot skip:
+ assertEquals(0, fsdis1.skip(-1));
+ assertEquals(0, fsdis1.skip(0));
+ assertEquals(0, fsdis1.skip(1));
+ assertEquals(0, fsdis1.skip(Long.MAX_VALUE));
+
+ return baos.toByteArray();
+ } finally {
+ if (close) {
+ fsdis1.close();
+ fsdis2.close();
+ }
+ }
+ }
+
+ private static long skipUntilZero(final FilterInputStream fis,
+ final long toSkip) throws IOException {
+ long skipped = 0;
+ long remainsToSkip = toSkip;
+ long s;
+ while (skipped < toSkip) {
+ s = fis.skip(remainsToSkip); // actually skipped
+ if (s == 0) {
+ return skipped; // EOF or impossible to skip.
+ }
+ skipped += s;
+ remainsToSkip -= s;
+ }
+ return skipped;
+ }
+
+ private static byte[] prepareBin() {
+ byte[] bb = new byte[77777];
+ for (int i=0; i<bb.length; i++) {
+ // Generate values as unique as possible:
+ double d = Math.log(i + 2);
+ long bits = Double.doubleToLongBits(d);
+ bb[i] = (byte)bits;
+ }
+ return bb;
+ }
+
+ /*
+ * Run the HadoopArchives tool to create an archive on the
+ * given file system.
+ */
+ private String makeArchive() throws Exception {
+ final String inputPathStr = inputPath.toUri().getPath();
+ System.out.println("inputPathStr = " + inputPathStr);
+
+ final URI uri = fs.getUri();
+ final String prefix = "har://hdfs-" + uri.getHost() + ":" + uri.getPort()
+ + archivePath.toUri().getPath() + Path.SEPARATOR;
+
+ final String harName = "foo.har";
+ final String fullHarPathStr = prefix + harName;
+ final String[] args = { "-archiveName", harName, "-p", inputPathStr, "*",
+ archivePath.toString() };
+ System.setProperty(HadoopArchives.TEST_HADOOP_ARCHIVES_JAR_PATH,
+ HADOOP_ARCHIVES_JAR);
+ final HadoopArchives har = new HadoopArchives(conf);
+ assertEquals(0, ToolRunner.run(har, args));
+ return fullHarPathStr;
+ }
+
+ @Test
+ /*
+ * Tests copying from archive file system to a local file system
+ */
+ public void testCopyToLocal() throws Exception {
+ final String fullHarPathStr = makeArchive();
+
+ // make path to copy the file to:
+ final String tmpDir
+ = System.getProperty("test.build.data","build/test/data") + "/work-dir/har-fs-tmp";
+ final Path tmpPath = new Path(tmpDir);
+ final LocalFileSystem localFs = FileSystem.getLocal(new Configuration());
+ localFs.delete(tmpPath, true);
+ localFs.mkdirs(tmpPath);
+ assertTrue(localFs.exists(tmpPath));
+
+ // Create fresh HarFs:
+ final HarFileSystem harFileSystem = new HarFileSystem(fs);
+ try {
+ final URI harUri = new URI(fullHarPathStr);
+ harFileSystem.initialize(harUri, fs.getConf());
+
+ final Path sourcePath = new Path(fullHarPathStr + Path.SEPARATOR + "a");
+ final Path targetPath = new Path(tmpPath, "straus");
+ // copy the Har file to a local file system:
+ harFileSystem.copyToLocalFile(false, sourcePath, targetPath);
+ FileStatus straus = localFs.getFileStatus(targetPath);
+ // the file should contain just 1 character:
+ assertEquals(1, straus.getLen());
+ } finally {
+ harFileSystem.close();
+ localFs.delete(tmpPath, true);
+ }
+ }
+
}
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyCommitter.java Wed Oct 30 22:21:59 2013
@@ -332,6 +332,7 @@ public class TestCopyCommitter {
} finally {
TestDistCpUtils.delete(fs, workPath);
TestDistCpUtils.delete(fs, finalPath);
+ conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, false);
}
}
@@ -373,6 +374,7 @@ public class TestCopyCommitter {
} finally {
TestDistCpUtils.delete(fs, workPath);
TestDistCpUtils.delete(fs, finalPath);
+ conf.setBoolean(DistCpConstants.CONF_LABEL_ATOMIC_COPY, false);
}
}
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestCopyMapper.java Wed Oct 30 22:21:59 2013
@@ -75,6 +75,7 @@ public class TestCopyMapper {
Configuration configuration = new Configuration();
System.setProperty("test.build.data", "target/tmp/build/TEST_COPY_MAPPER/data");
configuration.set("hadoop.log.dir", "target/tmp");
+ configuration.set("dfs.namenode.fs-limits.min-block-size", "0");
LOG.debug("fs.default.name == " + configuration.get("fs.default.name"));
LOG.debug("dfs.http.address == " + configuration.get("dfs.http.address"));
return configuration;
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java Wed Oct 30 22:21:59 2013
@@ -46,7 +46,6 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.HftpFileSystem;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-gridmix/src/test/java/org/apache/hadoop/mapred/gridmix/DebugJobProducer.java Wed Oct 30 22:21:59 2013
@@ -122,11 +122,9 @@ public class DebugJobProducer implements
// Add/remove excess
recs[0] += totalrecs - tot_recs;
bytes[0] += totalbytes - tot_bytes;
- if (LOG.isInfoEnabled()) {
- LOG.info(
- "DIST: " + Arrays.toString(recs) + " " + tot_recs + "/" + totalrecs +
- " " + Arrays.toString(bytes) + " " + tot_bytes + "/" + totalbytes);
- }
+ LOG.info(
+ "DIST: " + Arrays.toString(recs) + " " + tot_recs + "/" + totalrecs +
+ " " + Arrays.toString(bytes) + " " + tot_bytes + "/" + totalbytes);
}
private static final AtomicInteger seq = new AtomicInteger(0);
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/IdentifierResolver.java Wed Oct 30 22:21:59 2013
@@ -19,6 +19,7 @@
package org.apache.hadoop.streaming.io;
import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.typedbytes.TypedBytesWritable;
@@ -34,6 +35,7 @@ public class IdentifierResolver {
public static final String TEXT_ID = "text";
public static final String RAW_BYTES_ID = "rawbytes";
public static final String TYPED_BYTES_ID = "typedbytes";
+ public static final String KEY_ONLY_TEXT_ID = "keyonlytext";
private Class<? extends InputWriter> inputWriterClass = null;
private Class<? extends OutputReader> outputReaderClass = null;
@@ -55,6 +57,11 @@ public class IdentifierResolver {
setOutputReaderClass(TypedBytesOutputReader.class);
setOutputKeyClass(TypedBytesWritable.class);
setOutputValueClass(TypedBytesWritable.class);
+ } else if (identifier.equalsIgnoreCase(KEY_ONLY_TEXT_ID)) {
+ setInputWriterClass(KeyOnlyTextInputWriter.class);
+ setOutputReaderClass(KeyOnlyTextOutputReader.class);
+ setOutputKeyClass(Text.class);
+ setOutputValueClass(NullWritable.class);
} else { // assume TEXT_ID
setInputWriterClass(TextInputWriter.class);
setOutputReaderClass(TextOutputReader.class);
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/io/TextInputWriter.java Wed Oct 30 22:21:59 2013
@@ -30,7 +30,7 @@ import org.apache.hadoop.streaming.PipeM
*/
public class TextInputWriter extends InputWriter<Object, Object> {
- private DataOutput clientOut;
+ protected DataOutput clientOut;
private byte[] inputSeparator;
@Override
@@ -53,7 +53,7 @@ public class TextInputWriter extends Inp
}
// Write an object to the output stream using UTF-8 encoding
- private void writeUTF8(Object object) throws IOException {
+ protected void writeUTF8(Object object) throws IOException {
byte[] bval;
int valSize;
if (object instanceof BytesWritable) {
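
The keyonlytext identifier above wires in KeyOnlyTextInputWriter and
KeyOnlyTextOutputReader, which are copied from branch-2 and whose contents do
not appear in this diff. Widening clientOut and writeUTF8 to protected is what
lets a key-only writer reuse them; a minimal sketch of what such a subclass
plausibly looks like under that assumption (an illustration, not the committed
file):

package org.apache.hadoop.streaming.io;

import java.io.IOException;

// Hypothetical sketch: emit only the key, one record per line, and drop
// the value. Relies on the TextInputWriter members widened to protected
// in the hunk above.
public class KeyOnlyTextInputWriter extends TextInputWriter {

  @Override
  public void writeKey(Object key) throws IOException {
    writeUTF8(key);        // UTF-8 encode the key via the parent helper
    clientOut.write('\n'); // newline-terminate the record
  }

  @Override
  public void writeValue(Object value) throws IOException {
    // keys only: the value is intentionally discarded
  }
}

A streaming job would then presumably select this mode with -io keyonlytext,
alongside the existing text, rawbytes, and typedbytes identifiers.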
Propchange: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/bin/cat.cmd
------------------------------------------------------------------------------
svn:eol-style = native
Propchange: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-streaming/src/test/bin/xargs_cat.cmd
------------------------------------------------------------------------------
svn:eol-style = native
Modified: hadoop/common/branches/YARN-321/hadoop-tools/hadoop-tools-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/hadoop-tools-dist/pom.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/hadoop-tools-dist/pom.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/hadoop-tools-dist/pom.xml Wed Oct 30 22:21:59 2013
@@ -77,6 +77,17 @@
<type>pom</type>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-openstack</artifactId>
+ <scope>compile</scope>
+ <version>${project.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-sls</artifactId>
+ <scope>compile</scope>
+ </dependency>
</dependencies>
<build>
Modified: hadoop/common/branches/YARN-321/hadoop-tools/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/hadoop-tools/pom.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/hadoop-tools/pom.xml (original)
+++ hadoop/common/branches/YARN-321/hadoop-tools/pom.xml Wed Oct 30 22:21:59 2013
@@ -40,6 +40,8 @@
<module>hadoop-tools-dist</module>
<module>hadoop-extras</module>
<module>hadoop-pipes</module>
+ <module>hadoop-openstack</module>
+ <module>hadoop-sls</module>
</modules>
<build>
Modified: hadoop/common/branches/YARN-321/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/YARN-321/pom.xml?rev=1537330&r1=1537329&r2=1537330&view=diff
==============================================================================
--- hadoop/common/branches/YARN-321/pom.xml (original)
+++ hadoop/common/branches/YARN-321/pom.xml Wed Oct 30 22:21:59 2013
@@ -107,8 +107,13 @@ xsi:schemaLocation="http://maven.apache.
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.4</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
- <version>1.0</version>
+ <version>1.3.1</version>
<configuration>
<rules>
<requireMavenVersion>
@@ -138,7 +143,7 @@ xsi:schemaLocation="http://maven.apache.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
- <version>1.6</version>
+ <version>1.7</version>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
@@ -444,6 +449,19 @@ xsi:schemaLocation="http://maven.apache.
<generateHtml>${cloverGenHtml}</generateHtml>
<generateXml>${cloverGenXml}</generateXml>
<generateHistorical>${cloverGenHistorical}</generateHistorical>
+ <excludes>
+ <exclude>**/examples/**/*.java</exclude>
+ <exclude>**/hamlet/*.java</exclude>
+ <exclude>**/ha/proto/*.java</exclude>
+ <exclude>**/protocol/proto/*.java</exclude>
+ <exclude>**/compiler/generated/*.java</exclude>
+ <exclude>**/protobuf/*.java</exclude>
+ <exclude>**/v2/proto/*.java</exclude>
+ <exclude>**/yarn/proto/*.java</exclude>
+ <exclude>**/security/proto/*.java</exclude>
+ <exclude>**/tools/proto/*.java</exclude>
+ <exclude>**/hs/proto/*.java</exclude>
+ </excludes>
</configuration>
<executions>
<execution>