You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by szetszwo@apache.org on 2013/03/07 03:57:45 UTC
svn commit: r1453669 - in /hadoop/common/branches/HDFS-2802: ./
hadoop-assemblies/src/main/resources/assemblies/ hadoop-dist/
hadoop-project-dist/ hadoop-project/
hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/
Author: szetszwo
Date: Thu Mar 7 02:57:40 2013
New Revision: 1453669
URL: http://svn.apache.org/r1453669
Log:
Merge r1449958 through r1453659 from trunk.
Modified:
hadoop/common/branches/HDFS-2802/ (props changed)
hadoop/common/branches/HDFS-2802/BUILDING.txt
hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml
hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml
hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml
hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
Propchange: hadoop/common/branches/HDFS-2802/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk:r1449958-1453659
Modified: hadoop/common/branches/HDFS-2802/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/BUILDING.txt?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-2802/BUILDING.txt Thu Mar 7 02:57:40 2013
@@ -138,3 +138,70 @@ Create a local staging version of the we
$ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
----------------------------------------------------------------------------------
+
+Building on Windows
+
+----------------------------------------------------------------------------------
+Requirements:
+
+* Windows System
+* JDK 1.6
+* Maven 3.0
+* Findbugs 1.3.9 (if running findbugs)
+* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS)
+* Unix command-line tools from GnuWin32 or Cygwin: sh, mkdir, rm, cp, tar, gzip
+* Windows SDK or Visual Studio 2010 Professional
+* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+
+If using Visual Studio, it must be Visual Studio 2010 Professional (not 2012).
+Do not use Visual Studio Express. It does not support compiling for 64-bit,
+which is problematic if running a 64-bit system. The Windows SDK is free to
+download here:
+
+http://www.microsoft.com/en-us/download/details.aspx?id=8279
+
+----------------------------------------------------------------------------------
+Building:
+
+Keep the source code tree in a short path to avoid running into problems related
+to Windows maximum path length limitation. (For example, C:\hdc).
+
+Run builds from a Windows SDK Command Prompt. (Start, All Programs,
+Microsoft Windows SDK v7.1, Windows SDK 7.1 Command Prompt.)
+
+JAVA_HOME must be set, and the path must not contain spaces. If the full path
+would contain spaces, then use the Windows short path instead.
+
+You must set the Platform environment variable to either x64 or Win32 depending
+on whether you're running a 64-bit or 32-bit system. Note that this is
+case-sensitive. It must be "Platform", not "PLATFORM" or "platform".
+Environment variables on Windows are usually case-insensitive, but Maven treats
+them as case-sensitive. Failure to set this environment variable correctly will
+cause msbuild to fail while building the native code in hadoop-common.
+
+set Platform=x64 (when building on a 64-bit system)
+set Platform=Win32 (when building on a 32-bit system)
+
+Several tests require that the user must have the Create Symbolic Links
+privilege.
+
+All Maven goals are the same as described above, with the addition of profile
+-Pnative-win to trigger building Windows native components. The native
+components are required (not optional) on Windows. For example:
+
+ * Run tests : mvn -Pnative-win test
+
+----------------------------------------------------------------------------------
+Building distributions:
+
+Create binary distribution with native code and with documentation:
+
+ $ mvn package -Pdist,native-win,docs -DskipTests -Dtar
+
+Create source distribution:
+
+ $ mvn package -Pnative-win,src -DskipTests
+
+Create source and binary distributions with native code and documentation:
+
+ $ mvn package -Pdist,native-win,docs,src -DskipTests -Dtar
Modified: hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Thu Mar 7 02:57:40 2013
@@ -26,6 +26,9 @@
<outputDirectory>/bin</outputDirectory>
<excludes>
<exclude>*.sh</exclude>
+ <exclude>*-config.cmd</exclude>
+ <exclude>start-*.cmd</exclude>
+ <exclude>stop-*.cmd</exclude>
</excludes>
<fileMode>0755</fileMode>
</fileSet>
@@ -38,6 +41,7 @@
<outputDirectory>/libexec</outputDirectory>
<includes>
<include>*-config.sh</include>
+ <include>*-config.cmd</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -46,9 +50,13 @@
<outputDirectory>/sbin</outputDirectory>
<includes>
<include>*.sh</include>
+ <include>*.cmd</include>
</includes>
<excludes>
<exclude>hadoop-config.sh</exclude>
+ <exclude>hadoop.cmd</exclude>
+ <exclude>hdfs.cmd</exclude>
+ <exclude>hadoop-config.cmd</exclude>
</excludes>
<fileMode>0755</fileMode>
</fileSet>
Modified: hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml Thu Mar 7 02:57:40 2013
@@ -33,6 +33,7 @@
<outputDirectory>bin</outputDirectory>
<includes>
<include>yarn</include>
+ <include>yarn.cmd</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -41,6 +42,7 @@
<outputDirectory>libexec</outputDirectory>
<includes>
<include>yarn-config.sh</include>
+ <include>yarn-config.cmd</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -52,6 +54,8 @@
<include>yarn-daemons.sh</include>
<include>start-yarn.sh</include>
<include>stop-yarn.sh</include>
+ <include>start-yarn.cmd</include>
+ <include>stop-yarn.cmd</include>
</includes>
<fileMode>0755</fileMode>
</fileSet>
@@ -121,7 +125,7 @@
</includes>
<binaries>
<attachmentClassifier>tests</attachmentClassifier>
- <outputDirectory>share/hadoop/${hadoop.component}</outputDirectory>
+ <outputDirectory>share/hadoop/${hadoop.component}/test</outputDirectory>
<includeDependencies>false</includeDependencies>
<unpack>false</unpack>
</binaries>
Modified: hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml Thu Mar 7 02:57:40 2013
@@ -107,7 +107,7 @@
fi
}
- ROOT=`cd ${basedir}/..;pwd`
+ ROOT=`cd ../..;pwd`
echo
echo "Current directory `pwd`"
echo
@@ -151,7 +151,8 @@
fi
}
- run tar czf hadoop-${project.version}.tar.gz hadoop-${project.version}
+ run tar cf hadoop-${project.version}.tar hadoop-${project.version}
+ run gzip hadoop-${project.version}.tar
echo
echo "Hadoop dist tar available at: ${project.build.directory}/hadoop-${project.version}.tar.gz"
echo
Modified: hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-project-dist/pom.xml Thu Mar 7 02:57:40 2013
@@ -335,13 +335,7 @@
<target>
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/dist-copynativelibs.sh">
-
- which cygpath 2> /dev/null
- if [ $? = 1 ]; then
- BUILD_DIR="${project.build.directory}"
- else
- BUILD_DIR=`cygpath --unix '${project.build.directory}'`
- fi
+ BUILD_DIR="${project.build.directory}"
TAR='tar cf -'
UNTAR='tar xfBp -'
LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
@@ -355,6 +349,13 @@
$$TAR *snappy* | (cd $${TARGET_DIR}/; $$UNTAR)
fi
fi
+ BIN_DIR="${BUILD_DIR}/bin"
+ if [ -d $${BIN_DIR} ] ; then
+ TARGET_BIN_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/bin"
+ mkdir -p $${TARGET_BIN_DIR}
+ cd $${BIN_DIR}
+ $$TAR * | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
+ fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./dist-copynativelibs.sh"/>
@@ -372,15 +373,8 @@
<target if="tar">
<!-- Using Unix script to preserve symlinks -->
<echo file="${project.build.directory}/dist-maketar.sh">
-
- which cygpath 2> /dev/null
- if [ $? = 1 ]; then
- BUILD_DIR="${project.build.directory}"
- else
- BUILD_DIR=`cygpath --unix '${project.build.directory}'`
- fi
- cd ${BUILD_DIR}
- tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+ cd "${project.build.directory}"
+ tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
<arg line="./dist-maketar.sh"/>
Modified: hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml Thu Mar 7 02:57:40 2013
@@ -391,9 +391,9 @@
</dependency>
<dependency>
- <groupId>org.jboss.netty</groupId>
+ <groupId>io.netty</groupId>
<artifactId>netty</artifactId>
- <version>3.2.4.Final</version>
+ <version>3.5.11.Final</version>
</dependency>
<dependency>
@@ -810,6 +810,8 @@
<forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
<argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
<environmentVariables>
+ <!-- HADOOP_HOME required for tests on Windows to find winutils -->
+ <HADOOP_HOME>${basedir}/../../hadoop-common-project/hadoop-common/target</HADOOP_HOME>
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${basedir}/../../hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/</LD_LIBRARY_PATH>
<MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
</environmentVariables>
@@ -884,6 +886,28 @@
</properties>
</profile>
<profile>
+ <id>native-win</id>
+ <activation>
+ <os>
+ <family>Windows</family>
+ </os>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <environmentVariables>
+ <!-- Specify where to look for the native DLL on Windows -->
+ <PATH>${env.PATH};${basedir}/../../hadoop-common-project/hadoop-common/target/bin</PATH>
+ </environmentVariables>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ <profile>
<id>test-patch</id>
<activation>
<activeByDefault>false</activeByDefault>
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/mapred/TestUniformSizeInputFormat.java Thu Mar 7 02:57:40 2013
@@ -33,8 +33,6 @@ import org.apache.hadoop.tools.CopyListi
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.hadoop.tools.StubContext;
import org.apache.hadoop.security.Credentials;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
@@ -48,9 +46,6 @@ import java.util.Random;
public class TestUniformSizeInputFormat {
- private static final Log LOG
- = LogFactory.getLog(TestUniformSizeInputFormat.class);
-
private static MiniDFSCluster cluster;
private static final int N_FILES = 20;
private static final int SIZEOF_EACH_FILE=1024;
@@ -118,12 +113,9 @@ public class TestUniformSizeInputFormat
List<InputSplit> splits
= uniformSizeInputFormat.getSplits(jobContext);
- List<InputSplit> legacySplits = legacyGetSplits(listFile, nMaps);
-
int sizePerMap = totalFileSize/nMaps;
checkSplits(listFile, splits);
- checkAgainstLegacy(splits, legacySplits);
int doubleCheckedTotalSize = 0;
int previousSplitSize = -1;
@@ -155,57 +147,6 @@ public class TestUniformSizeInputFormat
Assert.assertEquals(totalFileSize, doubleCheckedTotalSize);
}
- // From
- // http://svn.apache.org/repos/asf/hadoop/mapreduce/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
- private List<InputSplit> legacyGetSplits(Path listFile, int numSplits)
- throws IOException {
-
- FileSystem fs = cluster.getFileSystem();
- FileStatus srcst = fs.getFileStatus(listFile);
- Configuration conf = fs.getConf();
-
- ArrayList<InputSplit> splits = new ArrayList<InputSplit>(numSplits);
- FileStatus value = new FileStatus();
- Text key = new Text();
- final long targetsize = totalFileSize / numSplits;
- long pos = 0L;
- long last = 0L;
- long acc = 0L;
- long cbrem = srcst.getLen();
- SequenceFile.Reader sl = null;
-
- LOG.info("Average bytes per map: " + targetsize +
- ", Number of maps: " + numSplits + ", total size: " + totalFileSize);
-
- try {
- sl = new SequenceFile.Reader(conf, SequenceFile.Reader.file(listFile));
- for (; sl.next(key, value); last = sl.getPosition()) {
- // if adding this split would put this split past the target size,
- // cut the last split and put this next file in the next split.
- if (acc + value.getLen() > targetsize && acc != 0) {
- long splitsize = last - pos;
- FileSplit fileSplit = new FileSplit(listFile, pos, splitsize, null);
- LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + splitsize);
- splits.add(fileSplit);
- cbrem -= splitsize;
- pos = last;
- acc = 0L;
- }
- acc += value.getLen();
- }
- }
- finally {
- IOUtils.closeStream(sl);
- }
- if (cbrem != 0) {
- FileSplit fileSplit = new FileSplit(listFile, pos, cbrem, null);
- LOG.info ("Creating split : " + fileSplit + ", bytes in split: " + cbrem);
- splits.add(fileSplit);
- }
-
- return splits;
- }
-
private void checkSplits(Path listFile, List<InputSplit> splits) throws IOException {
long lastEnd = 0;
@@ -233,18 +174,6 @@ public class TestUniformSizeInputFormat
}
}
- private void checkAgainstLegacy(List<InputSplit> splits,
- List<InputSplit> legacySplits)
- throws IOException, InterruptedException {
-
- Assert.assertEquals(legacySplits.size(), splits.size());
- for (int index = 0; index < splits.size(); index++) {
- FileSplit fileSplit = (FileSplit) splits.get(index);
- FileSplit legacyFileSplit = (FileSplit) legacySplits.get(index);
- Assert.assertEquals(fileSplit.getStart(), legacyFileSplit.getStart());
- }
- }
-
@Test
public void testGetSplits() throws Exception {
testGetSplits(9);
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java Thu Mar 7 02:57:40 2013
@@ -83,6 +83,9 @@ public class JobBuilder {
private Map<ParsedHost, ParsedHost> allHosts =
new HashMap<ParsedHost, ParsedHost>();
+ private org.apache.hadoop.mapreduce.jobhistory.JhCounters EMPTY_COUNTERS =
+ new org.apache.hadoop.mapreduce.jobhistory.JhCounters();
+
/**
* The number of splits a task can have, before we ignore them all.
*/
@@ -459,7 +462,10 @@ public class JobBuilder {
TaskFailed t = (TaskFailed)(event.getDatum());
task.putDiagnosticInfo(t.error.toString());
task.putFailedDueToAttemptId(t.failedDueToAttempt.toString());
- // No counters in TaskFailedEvent
+ org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+ ((TaskFailed) event.getDatum()).counters;
+ task.incorporateCounters(
+ counters == null ? EMPTY_COUNTERS : counters);
}
private void processTaskAttemptUnsuccessfulCompletionEvent(
@@ -481,7 +487,10 @@ public class JobBuilder {
}
attempt.setFinishTime(event.getFinishTime());
-
+ org.apache.hadoop.mapreduce.jobhistory.JhCounters counters =
+ ((TaskAttemptUnsuccessfulCompletion) event.getDatum()).counters;
+ attempt.incorporateCounters(
+ counters == null ? EMPTY_COUNTERS : counters);
attempt.arraySetClockSplits(event.getClockSplits());
attempt.arraySetCpuUsages(event.getCpuUsages());
attempt.arraySetVMemKbytes(event.getVMemKbytes());
@@ -489,7 +498,6 @@ public class JobBuilder {
TaskAttemptUnsuccessfulCompletion t =
(TaskAttemptUnsuccessfulCompletion) (event.getDatum());
attempt.putDiagnosticInfo(t.error.toString());
- // No counters in TaskAttemptUnsuccessfulCompletionEvent
}
private void processTaskAttemptStartedEvent(TaskAttemptStartedEvent event) {
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingTaskLog.java Thu Mar 7 02:57:40 2013
@@ -83,7 +83,7 @@ public class TestStreamingTaskLog {
* (b) hadoop.tasklog.totalLogFileSize
* for the children of java tasks in streaming jobs.
*/
- @Test
+ @Test (timeout = 30000)
public void testStreamingTaskLogWithHadoopCmd() {
try {
final int numSlaves = 1;
@@ -124,8 +124,8 @@ public class TestStreamingTaskLog {
"echo $HADOOP_ROOT_LOGGER $HADOOP_CLIENT_OPTS").getBytes());
in.close();
- Shell.execCommand(new String[]{"chmod", "+x",
- scriptFile.getAbsolutePath()});
+ Shell.execCommand(Shell.getSetPermissionCommand("+x", false,
+ scriptFile.getAbsolutePath()));
return scriptFile;
}
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestSymLink.java Thu Mar 7 02:57:40 2013
@@ -53,7 +53,7 @@ public class TestSymLink
String cacheString = "This is just the cache string";
StreamJob job;
- @Test
+ @Test (timeout = 60000)
public void testSymLink() throws Exception
{
boolean mayExit = false;