Posted to common-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2011/07/29 22:45:44 UTC

Build failed in Jenkins: Hadoop-0.20.204-Build #13

See <https://builds.apache.org/job/Hadoop-0.20.204-Build/13/>

------------------------------------------
[...truncated 3389 lines...]
A         src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
A         src/examples/org/apache/hadoop/examples/terasort/package.html
A         src/examples/org/apache/hadoop/examples/dancing
A         src/examples/org/apache/hadoop/examples/dancing/puzzle1.dta
A         src/examples/org/apache/hadoop/examples/dancing/OneSidedPentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/DancingLinks.java
A         src/examples/org/apache/hadoop/examples/dancing/Pentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/Sudoku.java
A         src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/package.html
A         src/examples/org/apache/hadoop/examples/WordCount.java
A         src/examples/org/apache/hadoop/examples/DBCountPageView.java
A         src/examples/org/apache/hadoop/examples/Sort.java
A         src/examples/org/apache/hadoop/examples/AggregateWordCount.java
A         src/examples/org/apache/hadoop/examples/Grep.java
A         src/packages
A         src/packages/hadoop-create-user.sh
A         src/packages/hadoop-setup-hdfs.sh
A         src/packages/hadoop-setup-single-node.sh
A         src/packages/hadoop-setup-conf.sh
A         src/packages/update-hadoop-env.sh
A         src/packages/deb
A         src/packages/deb/init.d
A         src/packages/deb/init.d/hadoop-tasktracker
A         src/packages/deb/init.d/hadoop-datanode
A         src/packages/deb/init.d/hadoop-jobtracker
A         src/packages/deb/init.d/hadoop-namenode
A         src/packages/deb/hadoop.control
A         src/packages/deb/hadoop.control/control
A         src/packages/deb/hadoop.control/postinst
A         src/packages/deb/hadoop.control/postrm
A         src/packages/deb/hadoop.control/preinst
A         src/packages/deb/hadoop.control/conffile
A         src/packages/deb/hadoop.control/prerm
A         src/packages/rpm
A         src/packages/rpm/init.d
A         src/packages/rpm/init.d/hadoop-tasktracker
A         src/packages/rpm/init.d/hadoop-datanode
A         src/packages/rpm/init.d/hadoop-jobtracker
A         src/packages/rpm/init.d/hadoop-namenode
A         src/packages/rpm/spec
A         src/packages/rpm/spec/hadoop.spec
A         src/packages/templates
A         src/packages/templates/conf
A         src/packages/templates/conf/hdfs-site.xml
A         src/packages/templates/conf/core-site.xml
A         src/packages/templates/conf/hadoop-env.sh
A         src/packages/templates/conf/mapred-site.xml
A         bin
A         bin/stop-jobhistoryserver.sh
AU        bin/start-dfs.sh
AU        bin/hadoop-daemon.sh
A         bin/hadoop-config.sh
A         bin/start-jobhistoryserver.sh
AU        bin/stop-balancer.sh
AU        bin/stop-all.sh
AU        bin/stop-mapred.sh
AU        bin/slaves.sh
AU        bin/hadoop-daemons.sh
AU        bin/rcc
AU        bin/stop-dfs.sh
AU        bin/hadoop
AU        bin/start-balancer.sh
AU        bin/start-all.sh
AU        bin/start-mapred.sh
A         README.txt
A         build.xml
 U        .
At revision 1152369
no revision recorded for http://svn.apache.org/repos/asf/hadoop/nightly in the previous build
no revision recorded for http://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-204 in the previous build
[Hadoop-0.20.204-Build] $ /bin/bash -xe /tmp/hudson3424346922042454072.sh
+ export JAVA_HOME=/home/hudson/tools/java/latest1.6-64
+ JAVA_HOME=/home/hudson/tools/java/latest1.6-64
+ export ANT_HOME=/home/hudson/tools/ant/apache-ant-1.7.1
+ ANT_HOME=/home/hudson/tools/ant/apache-ant-1.7.1
+ export FORREST_HOME=/home/nigel/tools/forrest/latest
+ FORREST_HOME=/home/nigel/tools/forrest/latest
+ export ECLIPSE_HOME=/home/nigel/tools/eclipse/latest
+ ECLIPSE_HOME=/home/nigel/tools/eclipse/latest
+ export XERCES_HOME=/home/hudson/tools/xerces/c/latest
+ XERCES_HOME=/home/hudson/tools/xerces/c/latest
+ export JAVA5_HOME=/home/hudson/tools/java/latest1.5
+ JAVA5_HOME=/home/hudson/tools/java/latest1.5
+ export FINDBUGS_HOME=/home/hudson/tools/findbugs/latest
+ FINDBUGS_HOME=/home/hudson/tools/findbugs/latest
+ cd <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk>
+ /home/hudson/tools/ant/apache-ant-1.7.1/bin/ant -Dversion=0.20.204 -Dcompile.native=true -Dcompile.c++=true -Dlibhdfs=true -Dlibrecordio=true -Dtest.junit.output.format=xml -Dxercescroot=/home/hudson/tools/xerces/c/latest -Declipse.home=/home/nigel/tools/eclipse/latest -Djava5.home=/home/hudson/tools/java/latest1.5 -Dforrest.home=/home/nigel/tools/forrest/latest -Dfindbugs.home=/home/hudson/tools/findbugs/latest tar test-c++-libhdfs test findbugs
Buildfile: build.xml

clover.setup:

clover.info:
     [echo] 
     [echo]      Clover not found. Code coverage reports disabled.
     [echo]   

clover:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>

ivy-init-dirs:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy/lib>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy/report>

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: Ivy 2.0.0-rc2 - 20081028224207 :: http://ant.apache.org/ivy/ ::
:: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#Hadoop;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] 	found commons-httpclient#commons-httpclient;3.0.1 in default
[ivy:resolve] 	found commons-codec#commons-codec;1.4 in maven2
[ivy:resolve] 	found commons-cli#commons-cli;1.2 in default
[ivy:resolve] 	found xmlenc#xmlenc;0.52 in default
[ivy:resolve] 	found commons-daemon#commons-daemon;1.0.1 in maven2
[ivy:resolve] 	found net.java.dev.jets3t#jets3t;0.6.1 in maven2
[ivy:resolve] 	found commons-net#commons-net;1.4.1 in default
[ivy:resolve] 	found oro#oro;2.0.8 in default
[ivy:resolve] 	found org.mortbay.jetty#jetty;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty-util;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#servlet-api;2.5-20081211 in maven2
[ivy:resolve] 	found tomcat#jasper-runtime;5.5.12 in default
[ivy:resolve] 	found tomcat#jasper-compiler;5.5.12 in default
[ivy:resolve] 	found commons-el#commons-el;1.0 in default
[ivy:resolve] 	found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] 	found junit#junit;4.5 in maven2
[ivy:resolve] 	found commons-logging#commons-logging-api;1.0.4 in maven2
[ivy:resolve] 	found org.slf4j#slf4j-api;1.4.3 in maven2
[ivy:resolve] 	found org.eclipse.jdt#core;3.1.1 in default
[ivy:resolve] 	found org.slf4j#slf4j-log4j12;1.4.3 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-core-asl;1.0.1 in maven2
[ivy:resolve] 	found org.mockito#mockito-all;1.8.5 in maven2
[ivy:resolve] 	found com.jcraft#jsch;0.1.42 in maven2
[ivy:resolve] 	found org.aspectj#aspectjrt;1.6.5 in maven2
[ivy:resolve] 	found org.aspectj#aspectjtools;1.6.5 in maven2
[ivy:resolve] 	found org.vafer#jdeb;0.8 in maven2
[ivy:resolve] downloading http://repo1.maven.org/maven2/commons-codec/commons-codec/1.4/commons-codec-1.4.jar ...
[ivy:resolve] ...................................... (56kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] commons-codec#commons-codec;1.4!commons-codec.jar (892ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/commons-daemon/commons-daemon/1.0.1/commons-daemon-1.0.1.jar ...
[ivy:resolve] ........ (13kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] commons-daemon#commons-daemon;1.0.1!commons-daemon.jar (798ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/mortbay/jetty/jetty/6.1.26/jetty-6.1.26.jar ...
[ivy:resolve] .......................................................................................................................................................................................................................................................... (527kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.mortbay.jetty#jetty;6.1.26!jetty.jar (1320ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar ...
[ivy:resolve] ................................................................................................................... (172kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.mortbay.jetty#jetty-util;6.1.26!jetty-util.jar (992ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/mortbay/jetty/servlet-api/2.5-20081211/servlet-api-2.5-20081211.jar ...
[ivy:resolve] ................................................ (130kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.mortbay.jetty#servlet-api;2.5-20081211!servlet-api.jar (985ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/apache/commons/commons-math/2.1/commons-math-2.1.jar ...
[ivy:resolve] ..................................................................................................................................................................................................................................................... (812kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.apache.commons#commons-math;2.1!commons-math.jar (1374ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/mockito/mockito-all/1.8.5/mockito-all-1.8.5.jar ...
[ivy:resolve] .......... (1386kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.mockito#mockito-all;1.8.5!mockito-all.jar (1168ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/com/jcraft/jsch/0.1.42/jsch-0.1.42.jar ...
[ivy:resolve] ...................................................................................................... (181kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] com.jcraft#jsch;0.1.42!jsch.jar (986ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/aspectj/aspectjrt/1.6.5/aspectjrt-1.6.5.jar ...
[ivy:resolve] ............................................................................ (113kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.aspectj#aspectjrt;1.6.5!aspectjrt.jar (981ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/aspectj/aspectjtools/1.6.5/aspectjtools-1.6.5.jar ...
[ivy:resolve] .......... (8562kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.aspectj#aspectjtools;1.6.5!aspectjtools.jar (1932ms)
[ivy:resolve] downloading http://repo1.maven.org/maven2/org/vafer/jdeb/0.8/jdeb-0.8.jar ...
[ivy:resolve] ................................................ (215kB)
[ivy:resolve] .. (0kB)
[ivy:resolve] 	[SUCCESSFUL ] org.vafer#jdeb;0.8!jdeb.jar(maven-plugin) (948ms)
[ivy:resolve] :: resolution report :: resolve 25817ms :: artifacts dl 13130ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   30  |   11  |   11  |   0   ||   29  |   11  |
	---------------------------------------------------------------------
[ivy:resolve] 
[ivy:resolve] :: problems summary ::
[ivy:resolve] :::: WARNINGS
[ivy:resolve] 	io problem while parsing ivy file: http://repo1.maven.org/maven2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom: Resetting to invalid mark
[ivy:resolve] 	io problem while parsing ivy file: https://oss.sonatype.org/content/groups/public/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom: Resetting to invalid mark
[ivy:resolve] 		module not found: commons-configuration#commons-configuration;1.6
[ivy:resolve] 	==== local: tried
[ivy:resolve] 	  /home/hudson/.ivy2/local/commons-configuration/commons-configuration/1.6/ivys/ivy.xml
[ivy:resolve] 	  -- artifact commons-configuration#commons-configuration;1.6!commons-configuration.jar:
[ivy:resolve] 	  /home/hudson/.ivy2/local/commons-configuration/commons-configuration/1.6/jars/commons-configuration.jar
[ivy:resolve] 	==== maven2: tried
[ivy:resolve] 	  http://repo1.maven.org/maven2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom
[ivy:resolve] 	==== oss-sonatype: tried
[ivy:resolve] 	  https://oss.sonatype.org/content/groups/public/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		::          UNRESOLVED DEPENDENCIES         ::
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		:: commons-configuration#commons-configuration;1.6: not found
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] :::: ERRORS
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 	unknown resolver public
[ivy:resolve] 	unknown resolver ibiblio
[ivy:resolve] 	unknown resolver chain
[ivy:resolve] 
[ivy:resolve] :: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:2324: impossible to resolve dependencies:
	resolve failed - see output for details

Total time: 41 seconds
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
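
Two distinct problems show up in the #13 log above. The "unknown resolver ibiblio" / "unknown resolver chain" errors usually mean Ivy's resolution cache still references resolver names that the current ivysettings.xml no longer defines (clearing ~/.ivy2/cache is the usual remedy). The "Resetting to invalid mark" warnings, by contrast, are plain java.io semantics: a BufferedInputStream mark stays valid only for the readlimit passed to mark(), and calling reset() after reading past it throws exactly that message. A minimal sketch of the latter, using only standard JDK classes (the connection to Ivy's POM download is an inference from the warning text):

    import java.io.BufferedInputStream;
    import java.io.ByteArrayInputStream;
    import java.io.IOException;

    // Reproduces the JDK message behind Ivy's warning: once a read
    // overruns the readlimit given to mark(), reset() fails.
    public class InvalidMarkDemo {
        public static void main(String[] args) throws IOException {
            BufferedInputStream in = new BufferedInputStream(
                    new ByteArrayInputStream(new byte[64]), 8); // tiny 8-byte buffer
            in.mark(4);            // mark valid for at most 4 more bytes
            in.read(new byte[16]); // read well past the readlimit
            in.reset();            // throws java.io.IOException: Resetting to invalid mark
        }
    }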


Jenkins build is back to normal : Hadoop-0.20.204-Build #24

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/24/>



Build failed in Jenkins: Hadoop-0.20.204-Build #23

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/23/>

------------------------------------------
Started by user gkesavan
Building remotely on ubuntu2
Cleaning workspace <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/>
hudson.util.IOException2: remote file operation failed: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/> at hudson.remoting.Channel@1b8093e6:ubuntu2
	at hudson.FilePath.act(FilePath.java:754)
	at hudson.FilePath.act(FilePath.java:740)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:684)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:633)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1181)
	at hudson.model.AbstractBuild$AbstractRunner.checkout(AbstractBuild.java:536)
	at hudson.model.AbstractBuild$AbstractRunner.run(AbstractBuild.java:424)
	at hudson.model.Run.run(Run.java:1374)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Caused by: java.io.IOException: Unable to delete <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/org/apache/hadoop> - files in dir: [<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/org/apache/hadoop/mapred>]
	at hudson.Util.deleteFile(Util.java:262)
	at hudson.Util.deleteRecursive(Util.java:305)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.scm.subversion.CheckoutUpdater$1.perform(CheckoutUpdater.java:67)
	at hudson.scm.subversion.WorkspaceUpdater$UpdateTask.delegateTo(WorkspaceUpdater.java:135)
	at hudson.scm.SubversionSCM$CheckOutTask.perform(SubversionSCM.java:726)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:707)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:691)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1979)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:270)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
	at java.lang.Thread.run(Thread.java:636)
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
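
The checkout failure in #23 above (and the identical one in #22 below) is the same mechanism viewed through hudson.Util.deleteRecursive: the workspace wipe deletes children first and the directory last, and File.delete() on a directory fails while anything is still inside it (for example a leftover process still writing into the workspace), which surfaces as the "Unable to delete ... - files in dir: [...]" IOException. A rough sketch of the core loop (the real Hudson code adds retries and permission fixups; this is only the shape of it):

    import java.io.File;
    import java.io.IOException;
    import java.util.Arrays;

    // Depth-first workspace wipe. If delete() fails, report what is
    // still blocking the directory, mirroring the error text above.
    public class DeleteRecursiveSketch {
        static void deleteRecursive(File dir) throws IOException {
            File[] children = dir.listFiles();
            if (children != null) {
                for (File child : children) {
                    deleteRecursive(child);
                }
            }
            if (!dir.delete()) {
                throw new IOException("Unable to delete " + dir
                        + " - files in dir: " + Arrays.toString(dir.listFiles()));
            }
        }

        public static void main(String[] args) throws IOException {
            deleteRecursive(new File(args[0]));
        }
    }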


Build failed in Jenkins: Hadoop-0.20.204-Build #22

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/22/>

------------------------------------------
Started by user gkesavan
Building remotely on ubuntu2
Cleaning workspace <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/>
hudson.util.IOException2: remote file operation failed: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/> at hudson.remoting.Channel@1b8093e6:ubuntu2
	at hudson.FilePath.act(FilePath.java:754)
	at hudson.FilePath.act(FilePath.java:740)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:684)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:633)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1181)
	at hudson.model.AbstractBuild$AbstractRunner.checkout(AbstractBuild.java:536)
	at hudson.model.AbstractBuild$AbstractRunner.run(AbstractBuild.java:424)
	at hudson.model.Run.run(Run.java:1374)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Caused by: java.io.IOException: Unable to delete <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib> - files in dir: [<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib/hsqldb-1.8.0.10.LICENSE.txt>, <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib/kfs-0.2.LICENSE.txt>, <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib/kfs-0.2.2.jar>, <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib/hsqldb-1.8.0.10.jar>]
	at hudson.Util.deleteFile(Util.java:262)
	at hudson.Util.deleteRecursive(Util.java:305)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.scm.subversion.CheckoutUpdater$1.perform(CheckoutUpdater.java:67)
	at hudson.scm.subversion.WorkspaceUpdater$UpdateTask.delegateTo(WorkspaceUpdater.java:135)
	at hudson.scm.SubversionSCM$CheckOutTask.perform(SubversionSCM.java:726)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:707)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:691)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1979)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:270)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
	at java.lang.Thread.run(Thread.java:636)
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 


Build failed in Jenkins: Hadoop-0.20.204-Build #21

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/21/>

------------------------------------------
Started by user gkesavan
Building remotely on ubuntu2
Cleaning workspace <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/>
SCM check out aborted
Archiving artifacts
ERROR: Failed to archive artifacts: trunk/build/*.tar.gz
hudson.util.IOException2: hudson.util.IOException2: Failed to extract <https://builds.apache.org/job/Hadoop-0.20.204-Build/21/artifact/trunk/build/*.tar.gz>
	at hudson.FilePath.readFromTar(FilePath.java:1647)
	at hudson.FilePath.copyRecursiveTo(FilePath.java:1565)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:117)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:19)
	at hudson.model.AbstractBuild$AbstractRunner.perform(AbstractBuild.java:662)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:638)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:616)
	at hudson.model.Build$RunnerImpl.post2(Build.java:161)
	at hudson.model.AbstractBuild$AbstractRunner.post(AbstractBuild.java:585)
	at hudson.model.Run.run(Run.java:1398)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Caused by: java.io.IOException
	at hudson.remoting.FastPipedInputStream.read(FastPipedInputStream.java:175)
	at hudson.util.HeadBufferingStream.read(HeadBufferingStream.java:61)
	at java.util.zip.InflaterInputStream.fill(InflaterInputStream.java:221)
	at java.util.zip.InflaterInputStream.read(InflaterInputStream.java:141)
	at java.util.zip.GZIPInputStream.read(GZIPInputStream.java:92)
	at org.apache.tools.tar.TarBuffer.readBlock(TarBuffer.java:257)
	at org.apache.tools.tar.TarBuffer.readRecord(TarBuffer.java:223)
	at hudson.org.apache.tools.tar.TarInputStream.read(TarInputStream.java:345)
	at java.io.FilterInputStream.read(FilterInputStream.java:90)
	at org.apache.commons.io.IOUtils.copyLarge(IOUtils.java:1025)
	at org.apache.commons.io.IOUtils.copy(IOUtils.java:999)
	at hudson.util.IOUtils.copy(IOUtils.java:38)
	at hudson.FilePath.readFromTar(FilePath.java:1639)
	... 12 more

	at hudson.FilePath.copyRecursiveTo(FilePath.java:1572)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:117)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:19)
	at hudson.model.AbstractBuild$AbstractRunner.perform(AbstractBuild.java:662)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:638)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:616)
	at hudson.model.Build$RunnerImpl.post2(Build.java:161)
	at hudson.model.AbstractBuild$AbstractRunner.post(AbstractBuild.java:585)
	at hudson.model.Run.run(Run.java:1398)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Caused by: java.util.concurrent.ExecutionException: java.io.IOException: Pipe is already closed
	at hudson.remoting.Channel$2.adapt(Channel.java:694)
	at hudson.remoting.Channel$2.adapt(Channel.java:689)
	at hudson.remoting.FutureAdapter.get(FutureAdapter.java:59)
	at hudson.FilePath.copyRecursiveTo(FilePath.java:1568)
	... 11 more
Caused by: java.io.IOException: Pipe is already closed
	at hudson.remoting.PipeWindow.checkDeath(PipeWindow.java:83)
	at hudson.remoting.PipeWindow$Real.get(PipeWindow.java:165)
	at hudson.remoting.ProxyOutputStream._write(ProxyOutputStream.java:118)
	at hudson.remoting.ProxyOutputStream.write(ProxyOutputStream.java:103)
	at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
	at java.io.BufferedOutputStream.write(BufferedOutputStream.java:126)
	at java.util.zip.DeflaterOutputStream.deflate(DeflaterOutputStream.java:178)
	at java.util.zip.DeflaterOutputStream.write(DeflaterOutputStream.java:135)
	at java.util.zip.GZIPOutputStream.write(GZIPOutputStream.java:89)
	at java.io.BufferedOutputStream.write(BufferedOutputStream.java:122)
	at org.apache.tools.tar.TarBuffer.writeBlock(TarBuffer.java:410)
	at org.apache.tools.tar.TarBuffer.writeRecord(TarBuffer.java:351)
	at hudson.org.apache.tools.tar.TarOutputStream.writeEOFRecord(TarOutputStream.java:356)
	at hudson.org.apache.tools.tar.TarOutputStream.finish(TarOutputStream.java:137)
	at hudson.org.apache.tools.tar.TarOutputStream.close(TarOutputStream.java:149)
	at hudson.util.io.TarArchiver.close(TarArchiver.java:119)
	at hudson.FilePath.writeToTar(FilePath.java:1619)
	at hudson.FilePath.access$900(FilePath.java:164)
	at hudson.FilePath$33.invoke(FilePath.java:1558)
	at hudson.FilePath$33.invoke(FilePath.java:1555)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1979)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:270)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
	at java.lang.Thread.run(Thread.java:636)
Caused by: java.io.IOException: Pipe is already closed
	at hudson.remoting.FastPipedOutputStream.write(FastPipedOutputStream.java:147)
	at hudson.remoting.FastPipedOutputStream.write(FastPipedOutputStream.java:131)
	at hudson.remoting.ProxyOutputStream$Chunk$1.run(ProxyOutputStream.java:185)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:441)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:303)
	at java.util.concurrent.FutureTask.run(FutureTask.java:138)
	at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:886)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:908)
	at java.lang.Thread.run(Thread.java:662)
Caused by: hudson.remoting.FastPipedInputStream$ClosedBy: The pipe was closed at...
	at hudson.remoting.FastPipedInputStream.close(FastPipedInputStream.java:112)
	at java.io.FilterInputStream.close(FilterInputStream.java:155)
	at java.util.zip.InflaterInputStream.close(InflaterInputStream.java:210)
	at java.util.zip.GZIPInputStream.close(GZIPInputStream.java:113)
	at org.apache.tools.tar.TarBuffer.close(TarBuffer.java:456)
	at hudson.org.apache.tools.tar.TarInputStream.close(TarInputStream.java:110)
	at hudson.FilePath.readFromTar(FilePath.java:1649)
	at hudson.FilePath.copyRecursiveTo(FilePath.java:1565)
	at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:117)
	at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:19)
	at hudson.model.AbstractBuild$AbstractRunner.perform(AbstractBuild.java:662)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:638)
	at hudson.model.AbstractBuild$AbstractRunner.performAllBuildSteps(AbstractBuild.java:616)
	at hudson.model.Build$RunnerImpl.post2(Build.java:161)
	at hudson.model.AbstractBuild$AbstractRunner.post(AbstractBuild.java:585)
	at hudson.model.Run.run(Run.java:1398)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
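
Build #21's archiving failure is a pipe torn down out of order: the slave streams the workspace tarball into a remoting pipe (writeToTar in the trace) while the master reads it back (readFromTar); once the reading side aborts and closes the pipe, every further write fails. Hudson's FastPipedOutputStream reports this as "Pipe is already closed"; the plain java.io pipe below shows the same mechanism with the JDK's own wording:

    import java.io.IOException;
    import java.io.PipedInputStream;
    import java.io.PipedOutputStream;

    // Same failure shape as the writeToTar/readFromTar pair above:
    // the reader closes its end first, and the writer's next write fails.
    public class ClosedPipeDemo {
        public static void main(String[] args) throws IOException {
            PipedOutputStream out = new PipedOutputStream();
            PipedInputStream in = new PipedInputStream(out);
            in.close();    // reading side goes away first
            out.write(42); // throws java.io.IOException: Pipe closed
        }
    }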


Build failed in Jenkins: Hadoop-0.20.204-Build #20

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/20/>

------------------------------------------
[...truncated 12274 lines...]
     [exec] checking for pthread_create in -lpthread... yes
     [exec] checking for HMAC_Init in -lssl... yes
     [exec] checking for g++... g++
     [exec] checking whether we are using the GNU C++ compiler... yes
     [exec] checking whether g++ accepts -g... yes
     [exec] checking dependency style of g++... gcc3
     [exec] checking for a BSD-compatible install... /usr/bin/install -c
     [exec] checking build system type... x86_64-unknown-linux-gnu
     [exec] checking host system type... x86_64-unknown-linux-gnu
     [exec] checking for a sed that does not truncate output... /bin/sed
     [exec] checking for ld used by gcc... /usr/bin/ld
     [exec] checking if the linker (/usr/bin/ld) is GNU ld... yes
     [exec] checking for /usr/bin/ld option to reload object files... -r
     [exec] checking for BSD-compatible nm... /usr/bin/nm -B
     [exec] checking whether ln -s works... yes
     [exec] checking how to recognise dependent libraries... pass_all
     [exec] checking dlfcn.h usability... yes
     [exec] checking dlfcn.h presence... yes
     [exec] checking for dlfcn.h... yes
     [exec] checking how to run the C++ preprocessor... g++ -E
     [exec] checking for g77... no
     [exec] checking for xlf... no
     [exec] checking for f77... no
     [exec] checking for frt... no
     [exec] checking for pgf77... no
     [exec] checking for cf77... no
     [exec] checking for fort77... no
     [exec] checking for fl32... no
     [exec] checking for af77... no
     [exec] checking for xlf90... no
     [exec] checking for f90... no
     [exec] checking for pgf90... no
     [exec] checking for pghpf... no
     [exec] checking for epcf90... no
     [exec] checking for gfortran... no
     [exec] checking for g95... no
     [exec] checking for xlf95... no
     [exec] checking for f95... no
     [exec] checking for fort... no
     [exec] checking for ifort... no
     [exec] checking for ifc... no
     [exec] checking for efc... no
     [exec] checking for pgf95... no
     [exec] checking for lf95... no
     [exec] checking for ftn... no
     [exec] checking whether we are using the GNU Fortran 77 compiler... no
     [exec] checking whether  accepts -g... no
     [exec] checking the maximum length of command line arguments... 32768
     [exec] checking command to parse /usr/bin/nm -B output from gcc object... ok
     [exec] checking for objdir... .libs
     [exec] checking for ar... ar
     [exec] checking for ranlib... ranlib
     [exec] checking for strip... strip
     [exec] checking if gcc static flag  works... yes
     [exec] checking if gcc supports -fno-rtti -fno-exceptions... no
     [exec] checking for gcc option to produce PIC... -fPIC
     [exec] checking if gcc PIC flag -fPIC works... yes
     [exec] checking if gcc supports -c -o file.o... yes
     [exec] checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking whether -lc should be explicitly linked in... no
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] checking if libtool supports shared libraries... yes
     [exec] checking whether to build shared libraries... yes
     [exec] checking whether to build static libraries... yes
     [exec] configure: creating libtool
     [exec] appending configuration tag "CXX" to libtool
     [exec] checking for ld used by g++... /usr/bin/ld -m elf_x86_64
     [exec] checking if the linker (/usr/bin/ld -m elf_x86_64) is GNU ld... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking for g++ option to produce PIC... -fPIC
     [exec] checking if g++ PIC flag -fPIC works... yes
     [exec] checking if g++ supports -c -o file.o... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] appending configuration tag "F77" to libtool
     [exec] checking for unistd.h... (cached) yes
     [exec] checking for stdbool.h that conforms to C99... yes
     [exec] checking for _Bool... no
     [exec] checking for an ANSI C-conforming const... yes
     [exec] checking for off_t... yes
     [exec] checking for size_t... yes
     [exec] checking whether strerror_r is declared... yes
     [exec] checking for strerror_r... yes
     [exec] checking whether strerror_r returns char *... yes
     [exec] checking for mkdir... yes
     [exec] checking for uname... yes
     [exec] checking for shutdown in -lsocket... no
     [exec] checking for xdr_float in -lnsl... yes
     [exec] configure: creating ./config.status
     [exec] config.status: creating Makefile
     [exec] config.status: creating impl/config.h
     [exec] config.status: impl/config.h is unchanged
     [exec] config.status: executing depfiles commands

compile-c++-examples-pipes:
     [exec] make[1]: Entering directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'
     [exec] test -z "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>" || mkdir -p -- "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>"
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-simple' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>'
     [exec] /usr/bin/install -c wordcount-simple <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-part' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>'
     [exec] /usr/bin/install -c wordcount-part <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-nopipe' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>'
     [exec] /usr/bin/install -c wordcount-nopipe <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'pipes-sort' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>'
     [exec] /usr/bin/install -c pipes-sort <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>
     [exec] make[1]: Nothing to be done for `install-data-am'.
     [exec] make[1]: Leaving directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'

compile-c++-examples:

compile-examples:

generate-test-records:

compile-core-test:
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] Compiling 1 source file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
   [delete] Deleting: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar/testjob.jar>
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar/testjob.jar>
    [javac] Compiling 1 source file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell>
    [javac] Note: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/testshell/ExternalMapReduce.java> uses or overrides a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
   [delete] Deleting: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell/testshell.jar>
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell/testshell.jar>
   [delete] Deleting directory <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
   [delete] Deleting directory <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>

test-contrib:

test:
Trying to override old definition of task macro_tar

check-contrib:

init:
     [echo] contrib: hdfsproxy

init-contrib:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:

ivy-resolve-common:
[ivy:resolve] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#hdfsproxy;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-httpclient#commons-httpclient;3.0.1 in default
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found commons-cli#commons-cli;1.2 in default
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] 	found commons-logging#commons-logging-api;1.0.4 in maven2
[ivy:resolve] 	found junit#junit;4.5 in maven2
[ivy:resolve] 	found org.slf4j#slf4j-api;1.4.3 in maven2
[ivy:resolve] 	found org.slf4j#slf4j-log4j12;1.4.3 in maven2
[ivy:resolve] 	found xmlenc#xmlenc;0.52 in default
[ivy:resolve] 	found org.mortbay.jetty#jetty;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty-util;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#servlet-api;2.5-20081211 in maven2
[ivy:resolve] 	found org.eclipse.jdt#core;3.1.1 in default
[ivy:resolve] 	found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-core-asl;1.0.1 in maven2
[ivy:resolve] 	found commons-configuration#commons-configuration;1.6 in maven2
[ivy:resolve] 	found commons-collections#commons-collections;3.2.1 in maven2
[ivy:resolve] 	found commons-lang#commons-lang;2.4 in default
[ivy:resolve] 	found commons-logging#commons-logging;1.1.1 in default
[ivy:resolve] 	found commons-digester#commons-digester;1.8 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils;1.7.0 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils-core;1.8.0 in maven2
[ivy:resolve] 	found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] :: resolution report :: resolve 152ms :: artifacts dl 7ms
[ivy:resolve] 	:: evicted modules:
[ivy:resolve] 	commons-logging#commons-logging;1.0.4 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.0.3 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.1 by [commons-logging#commons-logging;1.1.1] in [common]
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   25  |   0   |   0   |   3   ||   22  |   0   |
	---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#hdfsproxy [sync]
[ivy:retrieve] 	confs: [common]
[ivy:retrieve] 	0 artifacts copied, 22 already retrieved (0kB/5ms)
[ivy:cachepath] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

compile:
     [echo] contrib: hdfsproxy

compile-examples:

compile-test:
     [echo] contrib: hdfsproxy
    [javac] Compiling 5 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/hdfsproxy/test>

test-junit:
     [copy] Copying 11 files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/hdfsproxy/src/test/resources/proxy-config>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/hdfsproxy/src/test/resources>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/hdfsproxy/src/test/resources>
    [junit] Running org.apache.hadoop.hdfsproxy.TestHdfsProxy
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 6.742 sec
    [junit] Test org.apache.hadoop.hdfsproxy.TestHdfsProxy FAILED

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:1114: The following error occurred while executing this line:
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:1103: The following error occurred while executing this line:
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/build.xml>:51: The following error occurred while executing this line:
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/hdfsproxy/build.xml>:278: Tests failed!

Total time: 250 minutes 25 seconds
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
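
One detail worth reading off the #20 report above: "Tests run: 1, Failures: 0, Errors: 1". In JUnit a failure is a broken assertion while an error is any other exception escaping the test, so TestHdfsProxy died on an unexpected exception (often an environment issue such as ports or minicluster startup) rather than on a wrong result. The distinction in two trivial tests (JUnit 4, matching the junit;4.5 artifact resolved earlier):

    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class FailureVsErrorDemo {
        @Test
        public void countsAsFailure() {
            assertEquals(1, 2); // AssertionError -> counted under "Failures"
        }

        @Test
        public void countsAsError() throws Exception {
            throw new IllegalStateException("boom"); // anything else -> "Errors"
        }
    }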


Build failed in Jenkins: Hadoop-0.20.204-Build #19

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/19/>

------------------------------------------
[...truncated 7179 lines...]
     [exec] aclocal.m4:3644: _LT_AC_LANG_F77_CONFIG is expanded from...
     [exec] aclocal.m4:3643: AC_LIBTOOL_LANG_F77_CONFIG is expanded from...
     [exec] configure.ac:36: warning: AC_CACHE_VAL(lt_prog_compiler_pic_works_GCJ, ...): suspicious cache-id, must contain _cv_ to be cached
     [exec] aclocal.m4:3744: _LT_AC_LANG_GCJ_CONFIG is expanded from...
     [exec] aclocal.m4:3743: AC_LIBTOOL_LANG_GCJ_CONFIG is expanded from...
     [exec] /bin/bash ./config.status --recheck
     [exec] running CONFIG_SHELL=/bin/bash /bin/bash <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/configure> --prefix=<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64> --with-hadoop-utils=<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64> --with-hadoop-pipes=<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64> --no-create --no-recursion
     [exec] checking for a BSD-compatible install... /usr/bin/install -c
     [exec] checking whether build environment is sane... yes
     [exec] checking for gawk... no
     [exec] checking for mawk... mawk
     [exec] checking whether make sets $(MAKE)... yes
     [exec] checking for style of include used by make... GNU
     [exec] checking for gcc... gcc
     [exec] checking whether the C compiler works... yes
     [exec] checking for C compiler default output file name... a.out
     [exec] checking for suffix of executables... 
     [exec] checking whether we are cross compiling... no
     [exec] checking for suffix of object files... o
     [exec] checking whether we are using the GNU C compiler... yes
     [exec] checking whether gcc accepts -g... yes
     [exec] checking for gcc option to accept ISO C89... none needed
     [exec] checking dependency style of gcc... gcc3
     [exec] checking how to run the C preprocessor... gcc -E
     [exec] checking for grep that handles long lines and -e... /bin/grep
     [exec] checking for egrep... /bin/grep -E
     [exec] checking for ANSI C header files... yes
     [exec] checking for sys/types.h... yes
     [exec] checking for sys/stat.h... yes
     [exec] checking for stdlib.h... yes
     [exec] checking for string.h... yes
     [exec] checking for memory.h... yes
     [exec] checking for strings.h... yes
     [exec] checking for inttypes.h... yes
     [exec] checking for stdint.h... yes
     [exec] checking for unistd.h... yes
     [exec] checking minix/config.h usability... no
     [exec] checking minix/config.h presence... no
     [exec] checking for minix/config.h... no
     [exec] checking whether it is safe to define __EXTENSIONS__... yes
     [exec] checking for special C compiler options needed for large files... no
     [exec] checking for _FILE_OFFSET_BITS value needed for large files... no
     [exec] checking pthread.h usability... yes
     [exec] checking pthread.h presence... yes
     [exec] checking for pthread.h... yes
     [exec] checking for pthread_create in -lpthread... yes
     [exec] checking for HMAC_Init in -lssl... yes
     [exec] checking for g++... g++
     [exec] checking whether we are using the GNU C++ compiler... yes
     [exec] checking whether g++ accepts -g... yes
     [exec] checking dependency style of g++... gcc3
     [exec] checking build system type... x86_64-unknown-linux-gnu
     [exec] checking host system type... x86_64-unknown-linux-gnu
     [exec] checking for a sed that does not truncate output... /bin/sed
     [exec] checking for ld used by gcc... /usr/bin/ld
     [exec] checking if the linker (/usr/bin/ld) is GNU ld... yes
     [exec] checking for /usr/bin/ld option to reload object files... -r
     [exec] checking for BSD-compatible nm... /usr/bin/nm -B
     [exec] checking whether ln -s works... yes
     [exec] checking how to recognise dependent libraries... pass_all
     [exec] checking dlfcn.h usability... yes
     [exec] checking dlfcn.h presence... yes
     [exec] checking for dlfcn.h... yes
     [exec] checking how to run the C++ preprocessor... g++ -E
     [exec] checking for g77... no
     [exec] checking for xlf... no
     [exec] checking for f77... no
     [exec] checking for frt... no
     [exec] checking for pgf77... no
     [exec] checking for cf77... no
     [exec] checking for fort77... no
     [exec] checking for fl32... no
     [exec] checking for af77... no
     [exec] checking for xlf90... no
     [exec] checking for f90... no
     [exec] checking for pgf90... no
     [exec] checking for pghpf... no
     [exec] checking for epcf90... no
     [exec] checking for gfortran... no
     [exec] checking for g95... no
     [exec] checking for xlf95... no
     [exec] checking for f95... no
     [exec] checking for fort... no
     [exec] checking for ifort... no
     [exec] checking for ifc... no
     [exec] checking for efc... no
     [exec] checking for pgf95... no
     [exec] checking for lf95... no
     [exec] checking for ftn... no
     [exec] checking whether we are using the GNU Fortran 77 compiler... no
     [exec] checking whether  accepts -g... no
     [exec] checking the maximum length of command line arguments... 32768
     [exec] checking command to parse /usr/bin/nm -B output from gcc object... ok
     [exec] checking for objdir... .libs
     [exec] checking for ar... ar
     [exec] checking for ranlib... ranlib
     [exec] checking for strip... strip
     [exec] checking if gcc static flag  works... yes
     [exec] checking if gcc supports -fno-rtti -fno-exceptions... no
     [exec] checking for gcc option to produce PIC... -fPIC
     [exec] checking if gcc PIC flag -fPIC works... yes
     [exec] checking if gcc supports -c -o file.o... yes
     [exec] checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking whether -lc should be explicitly linked in... no
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] checking if libtool supports shared libraries... yes
     [exec] checking whether to build shared libraries... yes
     [exec] checking whether to build static libraries... yes
     [exec] configure: creating libtool
     [exec] appending configuration tag "CXX" to libtool
     [exec] checking for ld used by g++... /usr/bin/ld -m elf_x86_64
     [exec] checking if the linker (/usr/bin/ld -m elf_x86_64) is GNU ld... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking for g++ option to produce PIC... -fPIC
     [exec] checking if g++ PIC flag -fPIC works... yes
     [exec] checking if g++ supports -c -o file.o... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] appending configuration tag "F77" to libtool
     [exec] checking for unistd.h... (cached) yes
     [exec] checking for stdbool.h that conforms to C99... yes
     [exec] checking for _Bool... no
     [exec] checking for an ANSI C-conforming const... yes
     [exec] checking for off_t... yes
     [exec] checking for size_t... yes
     [exec] checking whether strerror_r is declared... yes
     [exec] checking for strerror_r... yes
     [exec] checking whether strerror_r returns char *... yes
     [exec] checking for mkdir... yes
     [exec] checking for uname... yes
     [exec] checking for shutdown in -lsocket... no
     [exec] checking for xdr_float in -lnsl... yes
     [exec] configure: creating ./config.status
     [exec]  /bin/bash ./config.status
     [exec] config.status: creating Makefile
     [exec] config.status: creating impl/config.h
     [exec] config.status: impl/config.h is unchanged
     [exec] config.status: executing depfiles commands
     [exec] depbase=`echo impl/wordcount-simple.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-simple.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-simple.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-simple.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
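
The depbase recipe above is automake's portable dependency tracking: the sed expression rewrites each object path into a matching .deps path, the compiler writes its dependency output to a temporary .Tpo file, and that file is renamed to .Po only if the compile succeeds. A minimal sketch of the path rewriting, using the object file from this run:

    depbase=`echo impl/wordcount-simple.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`
    echo "$depbase"    # prints impl/.deps/wordcount-simple
    # the compile then writes $depbase.Tpo and, on success, renames it to $depbase.Po
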
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-simple  impl/wordcount-simple.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] mkdir .libs
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-simple impl/wordcount-simple.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/wordcount-part.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-part.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-part.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-part.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-part  impl/wordcount-part.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-part impl/wordcount-part.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/wordcount-nopipe.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-nopipe.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-nopipe.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-nopipe.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-nopipe  impl/wordcount-nopipe.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-nopipe impl/wordcount-nopipe.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/sort.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/sort.o -MD -MP -MF "$depbase.Tpo" -c -o impl/sort.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/sort.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o pipes-sort  impl/sort.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o pipes-sort impl/sort.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] make[1]: Entering directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'
     [exec] test -z "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>" || mkdir -p -- "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>"
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-simple' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>'
     [exec] /usr/bin/install -c wordcount-simple <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-part' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>'
     [exec] /usr/bin/install -c wordcount-part <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-nopipe' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>'
     [exec] /usr/bin/install -c wordcount-nopipe <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'pipes-sort' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>'
     [exec] /usr/bin/install -c pipes-sort <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>
     [exec] make[1]: Nothing to be done for `install-data-am'.
     [exec] make[1]: Leaving directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'

compile-c++-examples:

compile-examples:
    [javac] Compiling 24 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/examples>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.

examples:
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-examples-0.20.204.jar>

generate-test-records:

compile-core-test:
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] Compiling 496 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar/testjob.jar>
    [javac] Compiling 1 source file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell>
    [javac] Note: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/testshell/ExternalMapReduce.java> uses or overrides a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell/testshell.jar>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>

jar-test:
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-test-0.20.204.jar>

ant-tasks:
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ant/org/apache/hadoop/ant>
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-ant-0.20.204.jar>

compile-librecordio:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio>
     [exec] g++ -g3 -O0 -Wall -c -I/include -o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio/recordio.o> recordio.cc
     [exec] In file included from recordio.cc:22:
     [exec] xmlarchive.hh:22:41: error: xercesc/parsers/SAXParser.hpp: No such file or directory
     [exec] xmlarchive.hh:23:42: error: xercesc/util/PlatformUtils.hpp: No such file or directory
     [exec] xmlarchive.hh:24:43: error: xercesc/util/BinInputStream.hpp: No such file or directory
     [exec] xmlarchive.hh:25:39: error: xercesc/sax/HandlerBase.hpp: No such file or directory
     [exec] xmlarchive.hh:26:39: error: xercesc/sax/InputSource.hpp: No such file or directory
     [exec] In file included from recordio.cc:22:
     [exec] xmlarchive.hh:31: error: expected constructor, destructor, or type conversion before 'namespace'
     [exec] make: *** [<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio/recordio.o>] Error 1
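
All six errors above are missing Xerces-C headers, and the compile line that produced them passes a bare -I/include, which suggests the Xerces root was empty when the flags were assembled. A minimal check against the Xerces-C install this job references elsewhere (the XERCES_HOME path is taken from the build script later in this digest; the exact header layout is an assumption):

    # confirm the headers exist under the tool install the job expects
    XERCES_HOME=/home/hudson/tools/xerces/c/latest
    test -f "$XERCES_HOME/include/xercesc/parsers/SAXParser.hpp" && echo "xerces headers present"
    # librecordio needs the real include root rather than the bare -I/include seen above
    g++ -g3 -O0 -Wall -c -I"$XERCES_HOME/include" -o build/librecordio/recordio.o recordio.cc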

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:1878: exec returned: 2

Total time: 4 minutes 1 second
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 


Build failed in Jenkins: Hadoop-0.20.204-Build #18

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/18/>

------------------------------------------
[...truncated 7218 lines...]
  [javadoc] Constructing Javadoc information...
  [javadoc] JDiff: doclet started ...
  [javadoc] Error: file '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/lib/jdiff/hadoop_0.20.9.xml>' does not exist for the old API
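
The JDiff doclet stops here because the old-API baseline it was pointed at, lib/jdiff/hadoop_0.20.9.xml, is not in the workspace. A quick sanity check (hypothetical commands, run from the trunk checkout) to see which baselines are actually present:

    ls lib/jdiff/                        # list the hadoop_*.xml baselines that exist
    test -f lib/jdiff/hadoop_0.20.9.xml \
      || echo "baseline missing: add the XML or point the build at an existing version"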

create-c++-examples-pipes-makefile:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>
     [exec] checking for a BSD-compatible install... /usr/bin/install -c
     [exec] checking whether build environment is sane... yes
     [exec] checking for gawk... no
     [exec] checking for mawk... mawk
     [exec] checking whether make sets $(MAKE)... yes
     [exec] checking for style of include used by make... GNU
     [exec] checking for gcc... gcc
     [exec] checking for C compiler default output file name... a.out
     [exec] checking whether the C compiler works... yes
     [exec] checking whether we are cross compiling... no
     [exec] checking for suffix of executables... 
     [exec] checking for suffix of object files... o
     [exec] checking whether we are using the GNU C compiler... yes
     [exec] checking whether gcc accepts -g... yes
     [exec] checking for gcc option to accept ISO C89... none needed
     [exec] checking dependency style of gcc... gcc3
     [exec] checking how to run the C preprocessor... gcc -E
     [exec] checking for grep that handles long lines and -e... /bin/grep
     [exec] checking for egrep... /bin/grep -E
     [exec] checking for ANSI C header files... yes
     [exec] checking for sys/types.h... yes
     [exec] checking for sys/stat.h... yes
     [exec] checking for stdlib.h... yes
     [exec] checking for string.h... yes
     [exec] checking for memory.h... yes
     [exec] checking for strings.h... yes
     [exec] checking for inttypes.h... yes
     [exec] checking for stdint.h... yes
     [exec] checking for unistd.h... yes
     [exec] checking minix/config.h usability... no
     [exec] checking minix/config.h presence... no
     [exec] checking for minix/config.h... no
     [exec] checking whether it is safe to define __EXTENSIONS__... yes
     [exec] checking for special C compiler options needed for large files... no
     [exec] checking for _FILE_OFFSET_BITS value needed for large files... no
     [exec] checking pthread.h usability... yes
     [exec] checking pthread.h presence... yes
     [exec] checking for pthread.h... yes
     [exec] checking for pthread_create in -lpthread... yes
     [exec] checking for HMAC_Init in -lssl... yes
     [exec] checking for g++... g++
     [exec] checking whether we are using the GNU C++ compiler... yes
     [exec] checking whether g++ accepts -g... yes
     [exec] checking dependency style of g++... gcc3
     [exec] checking for a BSD-compatible install... /usr/bin/install -c
     [exec] checking build system type... x86_64-unknown-linux-gnu
     [exec] checking host system type... x86_64-unknown-linux-gnu
     [exec] checking for a sed that does not truncate output... /bin/sed
     [exec] checking for ld used by gcc... /usr/bin/ld
     [exec] checking if the linker (/usr/bin/ld) is GNU ld... yes
     [exec] checking for /usr/bin/ld option to reload object files... -r
     [exec] checking for BSD-compatible nm... /usr/bin/nm -B
     [exec] checking whether ln -s works... yes
     [exec] checking how to recognise dependent libraries... pass_all
     [exec] checking dlfcn.h usability... yes
     [exec] checking dlfcn.h presence... yes
     [exec] checking for dlfcn.h... yes
     [exec] checking how to run the C++ preprocessor... g++ -E
     [exec] checking for g77... no
     [exec] checking for xlf... no
     [exec] checking for f77... no
     [exec] checking for frt... no
     [exec] checking for pgf77... no
     [exec] checking for cf77... no
     [exec] checking for fort77... no
     [exec] checking for fl32... no
     [exec] checking for af77... no
     [exec] checking for xlf90... no
     [exec] checking for f90... no
     [exec] checking for pgf90... no
     [exec] checking for pghpf... no
     [exec] checking for epcf90... no
     [exec] checking for gfortran... no
     [exec] checking for g95... no
     [exec] checking for xlf95... no
     [exec] checking for f95... no
     [exec] checking for fort... no
     [exec] checking for ifort... no
     [exec] checking for ifc... no
     [exec] checking for efc... no
     [exec] checking for pgf95... no
     [exec] checking for lf95... no
     [exec] checking for ftn... no
     [exec] checking whether we are using the GNU Fortran 77 compiler... no
     [exec] checking whether  accepts -g... no
     [exec] checking the maximum length of command line arguments... 32768
     [exec] checking command to parse /usr/bin/nm -B output from gcc object... ok
     [exec] checking for objdir... .libs
     [exec] checking for ar... ar
     [exec] checking for ranlib... ranlib
     [exec] checking for strip... strip
     [exec] checking if gcc static flag  works... yes
     [exec] checking if gcc supports -fno-rtti -fno-exceptions... no
     [exec] checking for gcc option to produce PIC... -fPIC
     [exec] checking if gcc PIC flag -fPIC works... yes
     [exec] checking if gcc supports -c -o file.o... yes
     [exec] checking whether the gcc linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking whether -lc should be explicitly linked in... no
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] checking if libtool supports shared libraries... yes
     [exec] checking whether to build shared libraries... yes
     [exec] checking whether to build static libraries... yes
     [exec] configure: creating libtool
     [exec] appending configuration tag "CXX" to libtool
     [exec] checking for ld used by g++... /usr/bin/ld -m elf_x86_64
     [exec] checking if the linker (/usr/bin/ld -m elf_x86_64) is GNU ld... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking for g++ option to produce PIC... -fPIC
     [exec] checking if g++ PIC flag -fPIC works... yes
     [exec] checking if g++ supports -c -o file.o... yes
     [exec] checking whether the g++ linker (/usr/bin/ld -m elf_x86_64) supports shared libraries... yes
     [exec] checking dynamic linker characteristics... GNU/Linux ld.so
     [exec] checking how to hardcode library paths into programs... immediate
     [exec] checking whether stripping libraries is possible... yes
     [exec] appending configuration tag "F77" to libtool
     [exec] checking for unistd.h... (cached) yes
     [exec] checking for stdbool.h that conforms to C99... yes
     [exec] checking for _Bool... no
     [exec] checking for an ANSI C-conforming const... yes
     [exec] checking for off_t... yes
     [exec] checking for size_t... yes
     [exec] checking whether strerror_r is declared... yes
     [exec] checking for strerror_r... yes
     [exec] checking whether strerror_r returns char *... yes
     [exec] checking for mkdir... yes
     [exec] checking for uname... yes
     [exec] checking for shutdown in -lsocket... no
     [exec] checking for xdr_float in -lnsl... yes
     [exec] configure: creating ./config.status
     [exec] config.status: creating Makefile
     [exec] config.status: creating impl/config.h
     [exec] config.status: executing depfiles commands

compile-c++-examples-pipes:
     [exec] depbase=`echo impl/wordcount-simple.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-simple.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-simple.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-simple.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-simple  impl/wordcount-simple.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] mkdir .libs
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-simple impl/wordcount-simple.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/wordcount-part.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-part.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-part.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-part.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-part  impl/wordcount-part.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-part impl/wordcount-part.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/wordcount-nopipe.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/wordcount-nopipe.o -MD -MP -MF "$depbase.Tpo" -c -o impl/wordcount-nopipe.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/wordcount-nopipe.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o wordcount-nopipe  impl/wordcount-nopipe.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o wordcount-nopipe impl/wordcount-nopipe.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] depbase=`echo impl/sort.o | sed 's|[^/]*$|.deps/&|;s|\.o$||'`; \
     [exec] 	if g++ -DHAVE_CONFIG_H -I. -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes> -I./impl    -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -MT impl/sort.o -MD -MP -MF "$depbase.Tpo" -c -o impl/sort.o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/examples/pipes/impl/sort.cc>; \
     [exec] 	then mv -f "$depbase.Tpo" "$depbase.Po"; else rm -f "$depbase.Tpo"; exit 1; fi
     [exec] /bin/bash ./libtool --mode=link --tag=CXX g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2   -o pipes-sort  impl/sort.o -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread 
     [exec] g++ -Wall -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -I<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/include> -g -O2 -o pipes-sort impl/sort.o  -L<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++/Linux-amd64-64/lib> -lhadooppipes -lhadooputils -lnsl -lssl -lpthread
     [exec] make[1]: Entering directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'
     [exec] test -z "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>" || mkdir -p -- "<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin>"
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-simple' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>'
     [exec] /usr/bin/install -c wordcount-simple <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-simple>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-part' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>'
     [exec] /usr/bin/install -c wordcount-part <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-part>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'wordcount-nopipe' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>'
     [exec] /usr/bin/install -c wordcount-nopipe <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/wordcount-nopipe>
     [exec]   /bin/bash ./libtool --mode=install /usr/bin/install -c 'pipes-sort' '<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>'
     [exec] /usr/bin/install -c pipes-sort <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-examples/Linux-amd64-64/bin/pipes-sort>
     [exec] make[1]: Nothing to be done for `install-data-am'.
     [exec] make[1]: Leaving directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/examples/pipes>'

compile-c++-examples:

compile-examples:
    [javac] Compiling 24 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/examples>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.

examples:
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-examples-0.20.204.jar>

generate-test-records:

compile-core-test:
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] Compiling 496 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testjar/testjob.jar>
    [javac] Compiling 1 source file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell>
    [javac] Note: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/testshell/ExternalMapReduce.java> uses or overrides a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/testshell/testshell.jar>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/debug>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/test/cache>

jar-test:
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-test-0.20.204.jar>

ant-tasks:
     [copy] Copying 1 file to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ant/org/apache/hadoop/ant>
      [jar] Building jar: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/hadoop-ant-0.20.204.jar>

compile-librecordio:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio>
     [exec] g++ -g3 -O0 -Wall -c -I/include -o <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio/recordio.o> recordio.cc
     [exec] In file included from recordio.cc:22:
     [exec] xmlarchive.hh:22:41: error: xercesc/parsers/SAXParser.hpp: No such file or directory
     [exec] xmlarchive.hh:23:42: error: xercesc/util/PlatformUtils.hpp: No such file or directory
     [exec] xmlarchive.hh:24:43: error: xercesc/util/BinInputStream.hpp: No such file or directory
     [exec] xmlarchive.hh:25:39: error: xercesc/sax/HandlerBase.hpp: No such file or directory
     [exec] xmlarchive.hh:26:39: error: xercesc/sax/InputSource.hpp: No such file or directory
     [exec] In file included from recordio.cc:22:
     [exec] xmlarchive.hh:31: error: expected constructor, destructor, or type conversion before 'namespace'
     [exec] make: *** [<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/librecordio/recordio.o>] Error 1

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:1878: exec returned: 2

Total time: 4 minutes 2 seconds
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 


Build failed in Jenkins: Hadoop-0.20.204-Build #17

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/17/>

------------------------------------------
[...truncated 4991 lines...]
     [exec]      during execution
     [exec]    - add LIBDIR to the `LD_RUN_PATH' environment variable
     [exec]      during linking
     [exec]    - use the `-Wl,-rpath -Wl,LIBDIR' linker flag
     [exec]    - have your system administrator add LIBDIR to `/etc/ld.so.conf'
     [exec] 
     [exec] See any operating system documentation about shared libraries for
     [exec] more information, such as the ld(1) and ld.so(8) manual pages.
     [exec] ----------------------------------------------------------------------
     [exec] make[1]: Nothing to be done for `install-data-am'.
     [exec] make[1]: Leaving directory `<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/c++-build/Linux-amd64-64/libhdfs>'
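
The libtool notice ending above is its standard post-install reminder about making a freshly installed shared library (here libhdfs) resolvable at run time. A minimal sketch of the two common options it lists, assuming the library lands as libhdfs.so under this job's C++ lib directory (hdfs_client is a placeholder object):

    LIBDIR=$WORKSPACE/trunk/build/c++/Linux-amd64-64/lib    # assumed install location
    export LD_LIBRARY_PATH="$LIBDIR:$LD_LIBRARY_PATH"       # option 1: runtime search path
    # option 2: bake the path into the binary at link time
    g++ -o hdfs_client hdfs_client.o -L"$LIBDIR" -lhdfs -Wl,-rpath -Wl,"$LIBDIR"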

compile-contrib:

compile:

check-contrib:

init:
     [echo] contrib: capacity-scheduler
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/test>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/system>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/system/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/examples>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/test/logs>

init-contrib:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#capacity-scheduler;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found junit#junit;4.5 in maven2
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty-util;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#servlet-api;2.5-20081211 in maven2
[ivy:resolve] 	found commons-httpclient#commons-httpclient;3.0.1 in default
[ivy:resolve] 	found commons-codec#commons-codec;1.4 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-core-asl;1.0.1 in maven2
[ivy:resolve] 	found commons-configuration#commons-configuration;1.6 in maven2
[ivy:resolve] 	found commons-collections#commons-collections;3.2.1 in maven2
[ivy:resolve] 	found commons-lang#commons-lang;2.4 in default
[ivy:resolve] 	found commons-logging#commons-logging;1.1.1 in default
[ivy:resolve] 	found commons-digester#commons-digester;1.8 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils;1.7.0 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils-core;1.8.0 in maven2
[ivy:resolve] 	found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] :: resolution report :: resolve 249ms :: artifacts dl 12ms
[ivy:resolve] 	:: evicted modules:
[ivy:resolve] 	commons-logging#commons-logging;1.0.4 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.0.3 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.1 by [commons-logging#commons-logging;1.1.1] in [common]
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   20  |   0   |   0   |   3   ||   17  |   0   |
	---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#capacity-scheduler [sync]
[ivy:retrieve] 	confs: [common]
[ivy:retrieve] 	17 artifacts copied, 0 already retrieved (4642kB/25ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

compile:
     [echo] contrib: capacity-scheduler
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/capacity-scheduler/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.

check-contrib:

init:
     [echo] contrib: datajoin
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/test>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/system>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/system/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/examples>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/test/logs>

init-contrib:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#datajoin;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] 	found commons-configuration#commons-configuration;1.6 in maven2
[ivy:resolve] 	found commons-collections#commons-collections;3.2.1 in maven2
[ivy:resolve] 	found commons-lang#commons-lang;2.4 in default
[ivy:resolve] 	found commons-logging#commons-logging;1.1.1 in default
[ivy:resolve] 	found commons-digester#commons-digester;1.8 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils;1.7.0 in maven2
[ivy:resolve] 	found commons-beanutils#commons-beanutils-core;1.8.0 in maven2
[ivy:resolve] 	found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] 	found junit#junit;4.5 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty-util;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#servlet-api;2.5-20081211 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-core-asl;1.0.1 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in maven2
[ivy:resolve] 	found commons-httpclient#commons-httpclient;3.0.1 in default
[ivy:resolve] :: resolution report :: resolve 189ms :: artifacts dl 11ms
[ivy:resolve] 	:: evicted modules:
[ivy:resolve] 	commons-logging#commons-logging;1.0.4 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.0.3 by [commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve] 	commons-logging#commons-logging;1.1 by [commons-logging#commons-logging;1.1.1] in [common]
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   19  |   0   |   0   |   3   ||   16  |   0   |
	---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#datajoin [sync]
[ivy:retrieve] 	confs: [common]
[ivy:retrieve] 	16 artifacts copied, 0 already retrieved (4585kB/23ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

compile:
     [echo] contrib: datajoin
    [javac] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/datajoin/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.

check-contrib:

init:
     [echo] contrib: eclipse-plugin
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/test>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/system>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/system/classes>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/examples>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/test/logs>

init-contrib:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#eclipse-plugin;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] :: resolution report :: resolve 25ms :: artifacts dl 1ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   2   |   0   |   0   |   0   ||   2   |   0   |
	---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#eclipse-plugin [sync]
[ivy:retrieve] 	confs: [common]
[ivy:retrieve] 	2 artifacts copied, 0 already retrieved (419kB/9ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

compile:
     [echo] contrib: eclipse-plugin
    [javac] Compiling 45 source files to <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/contrib/eclipse-plugin/classes>
    [javac] <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java>:35: cannot find symbol
    [javac] symbol  : class JavaApplicationLaunchShortcut
    [javac] location: package org.eclipse.jdt.debug.ui.launchConfigurations
    [javac] import org.eclipse.jdt.debug.ui.launchConfigurations.JavaApplicationLaunchShortcut;
    [javac]                                                     ^
    [javac] <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java>:49: cannot find symbol
    [javac] symbol: class JavaApplicationLaunchShortcut
    [javac]     JavaApplicationLaunchShortcut {
    [javac]     ^
    [javac] <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java>:66: cannot find symbol
    [javac] symbol  : variable super
    [javac] location: class org.apache.hadoop.eclipse.launch.HadoopApplicationLaunchShortcut
    [javac]         super.findLaunchConfiguration(type, configType);
    [javac]         ^
    [javac] <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java>:67: cannot find symbol
    [javac] symbol  : variable super
    [javac] location: class org.apache.hadoop.eclipse.launch.HadoopApplicationLaunchShortcut
    [javac]     if (iConf == null) iConf = super.createConfiguration(type);
    [javac]                                ^
    [javac] <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/src/java/org/apache/hadoop/eclipse/launch/HadoopApplicationLaunchShortcut.java>:60: method does not override or implement a method from a supertype
    [javac]   @Override
    [javac]   ^
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] Note: Some input files use unchecked or unsafe operations.
    [javac] Note: Recompile with -Xlint:unchecked for details.
    [javac] 5 errors

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:645: The following error occurred while executing this line:
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/build.xml>:30: The following error occurred while executing this line:
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/contrib/eclipse-plugin/build.xml>:61: Compile failed; see the compiler error output for details.
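
All five javac errors trace back to the first: org.eclipse.jdt.debug.ui.launchConfigurations.JavaApplicationLaunchShortcut cannot be resolved against the Eclipse install on the build machine, so the extends clause, the super calls, and the @Override fail in cascade. Whether that class is visible depends on the JDT version at ECLIPSE_HOME; a hypothetical way to check which bundle, if any, ships it:

    ECLIPSE_HOME=/home/nigel/tools/eclipse/latest
    for jar in "$ECLIPSE_HOME"/plugins/org.eclipse.jdt.debug.ui_*.jar; do
      unzip -l "$jar" | grep -i JavaApplicationLaunchShortcut && echo "found in $jar"
    done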

Total time: 1 minute 36 seconds
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 


Build failed in Jenkins: Hadoop-0.20.204-Build #16

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/16/>

------------------------------------------
[...truncated 3366 lines...]
A         src/examples/org/apache/hadoop/examples/ExampleDriver.java
A         src/examples/org/apache/hadoop/examples/RandomWriter.java
A         src/examples/org/apache/hadoop/examples/package.html
A         src/examples/org/apache/hadoop/examples/RandomTextWriter.java
A         src/examples/org/apache/hadoop/examples/terasort
A         src/examples/org/apache/hadoop/examples/terasort/job_history_summary.py
A         src/examples/org/apache/hadoop/examples/terasort/TeraSort.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraInputFormat.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraGen.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraOutputFormat.java
A         src/examples/org/apache/hadoop/examples/terasort/TeraValidate.java
A         src/examples/org/apache/hadoop/examples/terasort/package.html
A         src/examples/org/apache/hadoop/examples/dancing
A         src/examples/org/apache/hadoop/examples/dancing/puzzle1.dta
A         src/examples/org/apache/hadoop/examples/dancing/OneSidedPentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/DancingLinks.java
A         src/examples/org/apache/hadoop/examples/dancing/Pentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/Sudoku.java
A         src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
A         src/examples/org/apache/hadoop/examples/dancing/package.html
A         src/examples/org/apache/hadoop/examples/WordCount.java
A         src/examples/org/apache/hadoop/examples/DBCountPageView.java
A         src/examples/org/apache/hadoop/examples/Sort.java
A         src/examples/org/apache/hadoop/examples/AggregateWordCount.java
A         src/examples/org/apache/hadoop/examples/Grep.java
A         src/packages
A         src/packages/hadoop-create-user.sh
A         src/packages/hadoop-setup-hdfs.sh
A         src/packages/hadoop-setup-single-node.sh
A         src/packages/hadoop-setup-conf.sh
A         src/packages/update-hadoop-env.sh
A         src/packages/deb
A         src/packages/deb/init.d
A         src/packages/deb/init.d/hadoop-tasktracker
A         src/packages/deb/init.d/hadoop-datanode
A         src/packages/deb/init.d/hadoop-jobtracker
A         src/packages/deb/init.d/hadoop-namenode
A         src/packages/deb/hadoop.control
A         src/packages/deb/hadoop.control/control
A         src/packages/deb/hadoop.control/postinst
A         src/packages/deb/hadoop.control/postrm
A         src/packages/deb/hadoop.control/preinst
A         src/packages/deb/hadoop.control/conffile
A         src/packages/deb/hadoop.control/prerm
A         src/packages/rpm
A         src/packages/rpm/init.d
A         src/packages/rpm/init.d/hadoop-tasktracker
A         src/packages/rpm/init.d/hadoop-datanode
A         src/packages/rpm/init.d/hadoop-jobtracker
A         src/packages/rpm/init.d/hadoop-namenode
A         src/packages/rpm/spec
A         src/packages/rpm/spec/hadoop.spec
A         src/packages/templates
A         src/packages/templates/conf
A         src/packages/templates/conf/hdfs-site.xml
A         src/packages/templates/conf/core-site.xml
A         src/packages/templates/conf/hadoop-env.sh
A         src/packages/templates/conf/mapred-site.xml
A         bin
A         bin/stop-jobhistoryserver.sh
AU        bin/start-dfs.sh
AU        bin/hadoop-daemon.sh
A         bin/hadoop-config.sh
A         bin/start-jobhistoryserver.sh
AU        bin/stop-balancer.sh
AU        bin/stop-all.sh
AU        bin/stop-mapred.sh
AU        bin/slaves.sh
AU        bin/hadoop-daemons.sh
AU        bin/rcc
AU        bin/stop-dfs.sh
AU        bin/hadoop
AU        bin/start-balancer.sh
AU        bin/start-all.sh
AU        bin/start-mapred.sh
A         README.txt
A         build.xml
 U        .
At revision 1152390
no revision recorded for http://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.20-security-204 in the previous build
[Hadoop-0.20.204-Build] $ /bin/bash -xe /tmp/hudson5966530285364984988.sh
+ export JAVA_HOME=/home/hudson/tools/java/latest1.6-64
+ JAVA_HOME=/home/hudson/tools/java/latest1.6-64
+ export ANT_HOME=/home/hudson/tools/ant/apache-ant-1.7.1
+ ANT_HOME=/home/hudson/tools/ant/apache-ant-1.7.1
+ export FORREST_HOME=/home/nigel/tools/forrest/latest
+ FORREST_HOME=/home/nigel/tools/forrest/latest
+ export ECLIPSE_HOME=/home/nigel/tools/eclipse/latest
+ ECLIPSE_HOME=/home/nigel/tools/eclipse/latest
+ export XERCES_HOME=/home/hudson/tools/xerces/c/latest
+ XERCES_HOME=/home/hudson/tools/xerces/c/latest
+ export JAVA5_HOME=/home/hudson/tools/java/latest1.5
+ JAVA5_HOME=/home/hudson/tools/java/latest1.5
+ export FINDBUGS_HOME=/home/hudson/tools/findbugs/latest
+ FINDBUGS_HOME=/home/hudson/tools/findbugs/latest
+ cd <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk>
+ /home/hudson/tools/ant/apache-ant-1.7.1/bin/ant -Dversion=0.20.204 -Dcompile.native=true -Dcompile.c++=true -Dlibhdfs=true -Dlibrecordio=true -Dtest.junit.output.format=xml-Dxercescroot=/home/hudson/tools/xerces/c/latest -Declipse.home=/home/nigel/tools/eclipse/latest -Djava5.home=/home/hudson/tools/java/latest1.5 -Dforrest.home=/home/nigel/tools/forrest/latest -Dfindbugs.home=/home/hudson/tools/findbugs/latest veryclean tar test-c++-libhdfs test findbugs
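
Note that the ant invocation above has no whitespace between -Dtest.junit.output.format=xml and -Dxercescroot=/home/hudson/tools/xerces/c/latest, so, if the log is faithful, ant sees a single property named test.junit.output.format whose value is "xml-Dxercescroot=/home/hudson/tools/xerces/c/latest", and xercescroot is never defined. That would be consistent with the bare -I/include on the failed librecordio compiles earlier in this digest. A sketch of the same invocation with the two properties separated:

    /home/hudson/tools/ant/apache-ant-1.7.1/bin/ant -Dversion=0.20.204 \
      -Dcompile.native=true -Dcompile.c++=true -Dlibhdfs=true -Dlibrecordio=true \
      -Dtest.junit.output.format=xml \
      -Dxercescroot=/home/hudson/tools/xerces/c/latest \
      -Declipse.home=/home/nigel/tools/eclipse/latest \
      -Djava5.home=/home/hudson/tools/java/latest1.5 \
      -Dforrest.home=/home/nigel/tools/forrest/latest \
      -Dfindbugs.home=/home/hudson/tools/findbugs/latest \
      veryclean tar test-c++-libhdfs test findbugs
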
Buildfile: build.xml

clean-contrib:

clean:

clean:
     [echo] contrib: capacity-scheduler

clean:
     [echo] contrib: datajoin

clean:
     [echo] contrib: eclipse-plugin

clean:
     [echo] contrib: failmon

clean:
     [echo] contrib: fairscheduler

check-libhdfs-fuse:

clean:

clean:
     [echo] contrib: gridmix
Trying to override old definition of task macro_tar

clean:
     [echo] contrib: hdfsproxy

clean:
     [echo] contrib: hod

clean:
     [echo] contrib: index

clean:
     [echo] contrib: streaming

clean:
     [echo] contrib: thriftfs

clean:
     [echo] contrib: vaidya

clean-sign:

clean-fi:

clean:

veryclean:

clover.setup:

clover.info:
     [echo] 
     [echo]      Clover not found. Code coverage reports disabled.
     [echo]   

clover:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivy-2.1.0.jar>

ivy-init-dirs:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy/lib>
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build/ivy/report>

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: Ivy 2.0.0-rc2 - 20081028224207 :: http://ant.apache.org/ivy/ ::
:: loading settings :: file = <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: org.apache.hadoop#Hadoop;working@vesta.apache.org
[ivy:resolve] 	confs: [common]
[ivy:resolve] 	found commons-logging#commons-logging;1.0.4 in default
[ivy:resolve] 	found log4j#log4j;1.2.15 in maven2
[ivy:resolve] 	found commons-httpclient#commons-httpclient;3.0.1 in default
[ivy:resolve] 	found commons-codec#commons-codec;1.4 in maven2
[ivy:resolve] 	found commons-cli#commons-cli;1.2 in default
[ivy:resolve] 	found xmlenc#xmlenc;0.52 in default
[ivy:resolve] 	found commons-daemon#commons-daemon;1.0.1 in maven2
[ivy:resolve] 	found net.java.dev.jets3t#jets3t;0.6.1 in maven2
[ivy:resolve] 	found commons-net#commons-net;1.4.1 in default
[ivy:resolve] 	found oro#oro;2.0.8 in default
[ivy:resolve] 	found org.mortbay.jetty#jetty;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#jetty-util;6.1.26 in maven2
[ivy:resolve] 	found org.mortbay.jetty#servlet-api;2.5-20081211 in maven2
[ivy:resolve] 	found tomcat#jasper-runtime;5.5.12 in default
[ivy:resolve] 	found tomcat#jasper-compiler;5.5.12 in default
[ivy:resolve] 	found commons-el#commons-el;1.0 in default
[ivy:resolve] 	found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] 	found junit#junit;4.5 in maven2
[ivy:resolve] 	found commons-logging#commons-logging-api;1.0.4 in maven2
[ivy:resolve] 	found org.slf4j#slf4j-api;1.4.3 in maven2
[ivy:resolve] 	found org.eclipse.jdt#core;3.1.1 in default
[ivy:resolve] 	found org.slf4j#slf4j-log4j12;1.4.3 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in maven2
[ivy:resolve] 	found org.codehaus.jackson#jackson-core-asl;1.0.1 in maven2
[ivy:resolve] 	found org.mockito#mockito-all;1.8.5 in maven2
[ivy:resolve] 	found com.jcraft#jsch;0.1.42 in maven2
[ivy:resolve] 	found org.aspectj#aspectjrt;1.6.5 in maven2
[ivy:resolve] 	found org.aspectj#aspectjtools;1.6.5 in maven2
[ivy:resolve] 	found org.vafer#jdeb;0.8 in maven2
[ivy:resolve] :: resolution report :: resolve 3324ms :: artifacts dl 18ms
	---------------------------------------------------------------------
	|                  |            modules            ||   artifacts   |
	|       conf       | number| search|dwnlded|evicted|| number|dwnlded|
	---------------------------------------------------------------------
	|      common      |   30  |   0   |   0   |   0   ||   29  |   0   |
	---------------------------------------------------------------------
[ivy:resolve] 
[ivy:resolve] :: problems summary ::
[ivy:resolve] :::: WARNINGS
[ivy:resolve] 	io problem while parsing ivy file: http://repo1.maven.org/maven2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom: Resetting to invalid mark
[ivy:resolve] 	io problem while parsing ivy file: https://oss.sonatype.org/content/groups/public/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom: Resetting to invalid mark
[ivy:resolve] 		module not found: commons-configuration#commons-configuration;1.6
[ivy:resolve] 	==== local: tried
[ivy:resolve] 	  /home/hudson/.ivy2/local/commons-configuration/commons-configuration/1.6/ivys/ivy.xml
[ivy:resolve] 	  -- artifact commons-configuration#commons-configuration;1.6!commons-configuration.jar:
[ivy:resolve] 	  /home/hudson/.ivy2/local/commons-configuration/commons-configuration/1.6/jars/commons-configuration.jar
[ivy:resolve] 	==== maven2: tried
[ivy:resolve] 	  http://repo1.maven.org/maven2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom
[ivy:resolve] 	==== oss-sonatype: tried
[ivy:resolve] 	  https://oss.sonatype.org/content/groups/public/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.pom
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		::          UNRESOLVED DEPENDENCIES         ::
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		:: commons-configuration#commons-configuration;1.6: not found
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 
[ivy:resolve] :: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS

BUILD FAILED
<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/build.xml>:2324: impossible to resolve dependencies:
	resolve failed - see output for details

Total time: 7 seconds
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
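
The UNRESOLVED DEPENDENCIES block above is the proximate cause of this
failure, and the two "io problem while parsing ivy file ... Resetting to
invalid mark" warnings suggest a transient download/parsing problem rather
than a genuinely missing artifact (commons-configuration 1.6 does exist at
that Maven coordinate). A minimal recovery sketch, assuming shell access to
the build node and the default Ivy cache under /home/hudson/.ivy2 implied by
the local-repository paths in the log:

    # drop the possibly-corrupted cache entry for the failing module
    rm -rf /home/hudson/.ivy2/cache/commons-configuration
    # re-run only the resolution step; ivy-resolve-common is the target
    # that failed in the output above
    ant ivy-resolve-common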


Build failed in Jenkins: Hadoop-0.20.204-Build #15

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/15/>

------------------------------------------
Started by user gkesavan
Building remotely on ubuntu2
Cleaning workspace <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/>
hudson.util.IOException2: remote file operation failed: <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/> at hudson.remoting.Channel@4cb129bb:ubuntu2
	at hudson.FilePath.act(FilePath.java:754)
	at hudson.FilePath.act(FilePath.java:740)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:684)
	at hudson.scm.SubversionSCM.checkout(SubversionSCM.java:633)
	at hudson.model.AbstractProject.checkout(AbstractProject.java:1181)
	at hudson.model.AbstractBuild$AbstractRunner.checkout(AbstractBuild.java:536)
	at hudson.model.AbstractBuild$AbstractRunner.run(AbstractBuild.java:424)
	at hudson.model.Run.run(Run.java:1374)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:46)
	at hudson.model.ResourceController.execute(ResourceController.java:88)
	at hudson.model.Executor.run(Executor.java:145)
Caused by: java.io.IOException: Unable to delete <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/aop/org/apache/.svn> - files in dir: [<https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/aop/org/apache/.svn/tmp>, <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/trunk/src/test/aop/org/apache/.svn/all-wcprops>]
	at hudson.Util.deleteFile(Util.java:262)
	at hudson.Util.deleteRecursive(Util.java:305)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.Util.deleteRecursive(Util.java:304)
	at hudson.Util.deleteContentsRecursive(Util.java:224)
	at hudson.scm.subversion.CheckoutUpdater$1.perform(CheckoutUpdater.java:67)
	at hudson.scm.subversion.WorkspaceUpdater$UpdateTask.delegateTo(WorkspaceUpdater.java:135)
	at hudson.scm.SubversionSCM$CheckOutTask.perform(SubversionSCM.java:726)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:707)
	at hudson.scm.SubversionSCM$CheckOutTask.invoke(SubversionSCM.java:691)
	at hudson.FilePath$FileCallableWrapper.call(FilePath.java:1979)
	at hudson.remoting.UserRequest.perform(UserRequest.java:118)
	at hudson.remoting.UserRequest.perform(UserRequest.java:48)
	at hudson.remoting.Request$2.run(Request.java:270)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
	at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
	at java.util.concurrent.FutureTask.run(FutureTask.java:166)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
	at java.lang.Thread.run(Thread.java:636)
Archiving artifacts
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: 
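
Build #15 never reached compilation: the SCM step died while wiping the old
workspace because two Subversion metadata files under
src/test/aop/org/apache/.svn could not be deleted (a process still holding
the directory, or stale filesystem state, are the usual suspects). A manual
cleanup on the slave typically unsticks the next run; a sketch, assuming
shell access to ubuntu2 -- the on-disk path WS below is hypothetical, since
the log only shows the HTTP view of the workspace:

    # WS is an assumption: wherever this job's workspace lives on the
    # ubuntu2 slave's filesystem
    WS=/home/jenkins/workspace/Hadoop-0.20.204-Build
    # remove the checkout wholesale so the next build starts from a clean tree
    rm -rf "$WS/trunk"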


Build failed in Jenkins: Hadoop-0.20.204-Build #14

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-0.20.204-Build/14/>

------------------------------------------
Started by user gkesavan
Building remotely on ubuntu2
SCM check out aborted
Archiving artifacts
Cleaning workspace <https://builds.apache.org/job/Hadoop-0.20.204-Build/ws/>
Recording test results
Publishing Javadoc
Recording fingerprints
Description set: