Posted to common-dev@hadoop.apache.org by Apache Hudson Server <hu...@hudson.apache.org> on 2010/11/20 07:21:23 UTC

Build failed in Hudson: Hadoop-Common-trunk-Commit #438

See <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/438/changes>

Changes:

[nigel] Add some comments to commitBuild.sh and put artifacts in a single directory that can be cleaned up.

------------------------------------------
[...truncated 244 lines...]
    [javac]                                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:38: cannot find symbol
    [javac] symbol: class SpecificRecord
    [javac]                           extends AvroSerialization<SpecificRecord>{
    [javac]                                                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:48: cannot find symbol
    [javac] symbol  : class SpecificRecord
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public DatumReader getReader(Class<SpecificRecord> clazz) {
    [javac]                                      ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:48: cannot find symbol
    [javac] symbol  : class DatumReader
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public DatumReader getReader(Class<SpecificRecord> clazz) {
    [javac]          ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:58: cannot find symbol
    [javac] symbol  : class SpecificRecord
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public Schema getSchema(SpecificRecord t) {
    [javac]                           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:58: cannot find symbol
    [javac] symbol  : class Schema
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public Schema getSchema(SpecificRecord t) {
    [javac]          ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:64: cannot find symbol
    [javac] symbol  : class SpecificRecord
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public DatumWriter getWriter(Class<SpecificRecord> clazz) {
    [javac]                                      ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:64: cannot find symbol
    [javac] symbol  : class DatumWriter
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]   public DatumWriter getWriter(Class<SpecificRecord> clazz) {
    [javac]          ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:35: package org.apache.avro.ipc does not exist
    [javac] import org.apache.avro.ipc.Responder;
    [javac]                           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:36: package org.apache.avro.ipc does not exist
    [javac] import org.apache.avro.ipc.Transceiver;
    [javac]                           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:37: package org.apache.avro.reflect does not exist
    [javac] import org.apache.avro.reflect.ReflectRequestor;
    [javac]                               ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:38: package org.apache.avro.reflect does not exist
    [javac] import org.apache.avro.reflect.ReflectResponder;
    [javac]                               ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:39: package org.apache.avro.specific does not exist
    [javac] import org.apache.avro.specific.SpecificRequestor;
    [javac]                                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:179: cannot find symbol
    [javac] symbol  : class Transceiver
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]       Transceiver transeiver) throws IOException {
    [javac]       ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:178: cannot find symbol
    [javac] symbol  : class SpecificRequestor
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]   protected SpecificRequestor createRequestor(Class<?> protocol, 
    [javac]             ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:183: cannot find symbol
    [javac] symbol  : class Responder
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]   protected Responder createResponder(Class<?> iface, Object impl) {
    [javac]             ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:101: cannot find symbol
    [javac] symbol  : class Transceiver
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]   private static class ClientTransceiver extends Transceiver {
    [javac]                                                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:161: cannot find symbol
    [javac] symbol  : class SpecificRequestor
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine.Invoker
    [javac]     private final SpecificRequestor requestor;
    [javac]                   ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:189: cannot find symbol
    [javac] symbol  : class Responder
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine.TunnelResponder
    [javac]     private Responder responder;
    [javac]             ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:23: package org.apache.avro.ipc does not exist
    [javac] import org.apache.avro.ipc.Responder;
    [javac]                           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:24: package org.apache.avro.ipc does not exist
    [javac] import org.apache.avro.ipc.Transceiver;
    [javac]                           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:25: package org.apache.avro.specific does not exist
    [javac] import org.apache.avro.specific.SpecificRequestor;
    [javac]                                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:26: package org.apache.avro.specific does not exist
    [javac] import org.apache.avro.specific.SpecificResponder;
    [javac]                                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:37: cannot find symbol
    [javac] symbol  : class Transceiver
    [javac] location: class org.apache.hadoop.ipc.AvroSpecificRpcEngine
    [javac]       Transceiver transeiver) throws IOException {
    [javac]       ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:36: cannot find symbol
    [javac] symbol  : class SpecificRequestor
    [javac] location: class org.apache.hadoop.ipc.AvroSpecificRpcEngine
    [javac]   protected SpecificRequestor createRequestor(Class<?> protocol, 
    [javac]             ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:41: cannot find symbol
    [javac] symbol  : class Responder
    [javac] location: class org.apache.hadoop.ipc.AvroSpecificRpcEngine
    [javac]   protected Responder createResponder(Class<?> iface, Object impl) {
    [javac]             ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:31: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Config;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:32: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.KrbException;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:81: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]   private static Config kerbConf;
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:39: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.jgss.krb5.Krb5Util;
    [javac]                              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:40: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Credentials;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:41: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.PrincipalName;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/token/delegation/DelegationKey.java>:32: package org.apache.avro.reflect does not exist
    [javac] import org.apache.avro.reflect.Nullable;
    [javac]                               ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/token/delegation/DelegationKey.java>:42: cannot find symbol
    [javac] symbol  : class Nullable
    [javac] location: class org.apache.hadoop.security.token.delegation.DelegationKey
    [javac]   @Nullable
    [javac]    ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java>:96: cannot find symbol
    [javac] symbol  : class BinaryEncoder
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSerialization<T>.AvroSerializer
    [javac]       encoder = new BinaryEncoder(out);
    [javac]                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java>:130: cannot find symbol
    [javac] symbol  : variable DecoderFactory
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSerialization<T>.AvroDeserializer
    [javac]       decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
    [javac]                 ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java>:79: cannot find symbol
    [javac] symbol  : class ReflectDatumReader
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroReflectSerialization
    [javac]       return new ReflectDatumReader(clazz);
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java>:88: cannot find symbol
    [javac] symbol  : variable ReflectData
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroReflectSerialization
    [javac]     return ReflectData.get().getSchema(t.getClass());
    [javac]            ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.java>:94: cannot find symbol
    [javac] symbol  : class ReflectDatumWriter
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroReflectSerialization
    [javac]     return new ReflectDatumWriter();
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:43: cannot find symbol
    [javac] symbol  : class SpecificRecord
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]     return SpecificRecord.class.isAssignableFrom(c);
    [javac]            ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:50: cannot find symbol
    [javac] symbol  : class SpecificDatumReader
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]       return new SpecificDatumReader(clazz.newInstance().getSchema());
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.java>:65: cannot find symbol
    [javac] symbol  : class SpecificDatumWriter
    [javac] location: class org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization
    [javac]     return new SpecificDatumWriter();
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:180: cannot find symbol
    [javac] symbol  : class ReflectRequestor
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]     return new ReflectRequestor(protocol, transeiver);
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroRpcEngine.java>:184: cannot find symbol
    [javac] symbol  : class ReflectResponder
    [javac] location: class org.apache.hadoop.ipc.AvroRpcEngine
    [javac]     return new ReflectResponder(iface, impl);
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:38: cannot find symbol
    [javac] symbol  : class SpecificRequestor
    [javac] location: class org.apache.hadoop.ipc.AvroSpecificRpcEngine
    [javac]     return new SpecificRequestor(protocol, transeiver);
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/ipc/AvroSpecificRpcEngine.java>:42: cannot find symbol
    [javac] symbol  : class SpecificResponder
    [javac] location: class org.apache.hadoop.ipc.AvroSpecificRpcEngine
    [javac]     return new SpecificResponder(iface, impl);
    [javac]                ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:85: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]       kerbConf = Config.getInstance();
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:87: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac]     } catch (KrbException ke) {
    [javac]              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:120: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]     Credentials serviceCred = null;
    [javac]     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]       ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]                                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:123: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]           PrincipalName.KRB_NT_SRV_HST);
    [javac]           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:125: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]           .toString(), Krb5Util.ticketToCreds(getTgtFromSubject()));
    [javac]                        ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:124: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]       serviceCred = Credentials.acquireServiceCreds(principal
    [javac]                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:134: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]         .add(Krb5Util.credsToTicket(serviceCred));
    [javac]              ^
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] 73 errors
    [javac] 15 warnings

BUILD FAILED
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:346: Compile failed; see the compiler error output for details.

Total time: 8 seconds
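
Every error in this run is a missing-Avro symbol ("package org.apache.avro.ipc does not exist", "cannot find symbol: class SpecificRecord"), which points at the Avro jar dropping off the compile classpath rather than at the Hadoop sources themselves. As a minimal sketch, assuming the dependency is declared in ivy.xml, this is roughly what has to resolve for the compile to pass (the revision and conf mapping below are assumptions, not taken from this log):

    <!-- ivy.xml sketch: Avro on the compile configuration.
         rev="1.3.2" is an assumed value, not read from this build. -->
    <dependency org="org.apache.avro" name="avro"
                rev="1.3.2" conf="common->default"/>

Once that artifact resolves, org.apache.avro.ipc, org.apache.avro.specific and org.apache.avro.reflect are back on the javac classpath and the 73 errors above go away.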


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/*.tar.tgz': No such file or directory
mv: cannot stat `build/*.jar': No such file or directory
mv: cannot stat `build/test/findbugs': No such file or directory
mv: cannot stat `build/docs/api': No such file or directory
Build Failed
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure


Hudson build is back to normal : Hadoop-Common-trunk-Commit #442

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/442/changes>



Build failed in Hudson: Hadoop-Common-trunk-Commit #441

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/441/>

------------------------------------------
[...truncated 4137 lines...]
    [junit] Running org.apache.hadoop.io.compress.TestBlockDecompressorStream
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.162 sec
    [junit] Running org.apache.hadoop.io.compress.TestCodec
    [junit] Tests run: 16, Failures: 0, Errors: 0, Time elapsed: 60.267 sec
    [junit] Running org.apache.hadoop.io.compress.TestCodecFactory
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.346 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFile
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 1.439 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileByteArrays
    [junit] Tests run: 25, Failures: 0, Errors: 0, Time elapsed: 3.639 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileComparator2
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.436 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileComparators
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.396 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileJClassComparatorByteArrays
    [junit] Tests run: 25, Failures: 0, Errors: 0, Time elapsed: 3.69 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileLzoCodecsByteArrays
    [junit] Tests run: 25, Failures: 0, Errors: 0, Time elapsed: 0.185 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileLzoCodecsStreams
    [junit] Tests run: 19, Failures: 0, Errors: 0, Time elapsed: 0.166 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileNoneCodecsByteArrays
    [junit] Tests run: 25, Failures: 0, Errors: 0, Time elapsed: 1.823 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileNoneCodecsJClassComparatorByteArrays
    [junit] Tests run: 25, Failures: 0, Errors: 0, Time elapsed: 1.809 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileNoneCodecsStreams
    [junit] Tests run: 19, Failures: 0, Errors: 0, Time elapsed: 2.036 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileSeek
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 5.504 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileSeqFileComparison
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.664 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileSplit
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.969 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileStreams
    [junit] Tests run: 19, Failures: 0, Errors: 0, Time elapsed: 1.963 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestTFileUnsortedByteArrays
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.568 sec
    [junit] Running org.apache.hadoop.io.file.tfile.TestVLong
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 3.137 sec
    [junit] Running org.apache.hadoop.io.retry.TestRetryProxy
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 0.309 sec
    [junit] Running org.apache.hadoop.io.serializer.TestWritableSerialization
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.191 sec
    [junit] Running org.apache.hadoop.io.serializer.avro.TestAvroSerialization
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.366 sec
    [junit] Running org.apache.hadoop.ipc.TestAvroRpc
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.729 sec
    [junit] Running org.apache.hadoop.ipc.TestIPC
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 52.83 sec
    [junit] Running org.apache.hadoop.ipc.TestIPCServerResponder
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 5.42 sec
    [junit] Running org.apache.hadoop.ipc.TestMiniRPCBenchmark
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.949 sec
    [junit] Running org.apache.hadoop.ipc.TestRPC
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 57.847 sec
    [junit] Running org.apache.hadoop.ipc.TestSaslRPC
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 0.899 sec
    [junit] Running org.apache.hadoop.log.TestLogLevel
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.819 sec
    [junit] Running org.apache.hadoop.metrics.TestMetricsServlet
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.197 sec
    [junit] Running org.apache.hadoop.metrics.spi.TestOutputRecord
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.164 sec
    [junit] Running org.apache.hadoop.net.TestDNS
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 0.101 sec
    [junit] Running org.apache.hadoop.net.TestNetUtils
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.31 sec
    [junit] Running org.apache.hadoop.net.TestScriptBasedMapping
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.507 sec
    [junit] Running org.apache.hadoop.net.TestSocketIOWithTimeout
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.145 sec
    [junit] Running org.apache.hadoop.record.TestBuffer
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.089 sec
    [junit] Running org.apache.hadoop.record.TestRecordIO
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 0.159 sec
    [junit] Running org.apache.hadoop.record.TestRecordVersioning
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.173 sec
    [junit] Running org.apache.hadoop.security.TestCredentials
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.502 sec
    [junit] Running org.apache.hadoop.security.TestDoAsEffectiveUser
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 1.037 sec
    [junit] Running org.apache.hadoop.security.TestJNIGroupsMapping
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.221 sec
    [junit] Running org.apache.hadoop.security.TestKerberosName
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.327 sec
    [junit] Running org.apache.hadoop.security.TestSecurityUtil
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.323 sec
    [junit] Running org.apache.hadoop.security.TestUserGroupInformation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 0.422 sec
    [junit] Running org.apache.hadoop.security.authorize.TestAccessControlList
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 0.392 sec
    [junit] Running org.apache.hadoop.security.token.TestToken
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.248 sec
    [junit] Running org.apache.hadoop.security.token.delegation.TestDelegationToken
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 29.728 sec
    [junit] Running org.apache.hadoop.util.TestAsyncDiskService
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.123 sec
    [junit] Running org.apache.hadoop.util.TestCyclicIteration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.152 sec
    [junit] Running org.apache.hadoop.util.TestDiskChecker
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 0.567 sec
    [junit] Running org.apache.hadoop.util.TestGenericOptionsParser
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.671 sec
    [junit] Running org.apache.hadoop.util.TestGenericsUtil
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.303 sec
    [junit] Running org.apache.hadoop.util.TestHostsFileReader
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 0.234 sec
    [junit] Running org.apache.hadoop.util.TestIndexedSort
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 1.512 sec
    [junit] Running org.apache.hadoop.util.TestOptions
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.154 sec
    [junit] Running org.apache.hadoop.util.TestPureJavaCrc32
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.562 sec
    [junit] Running org.apache.hadoop.util.TestReflectionUtils
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.653 sec
    [junit] Running org.apache.hadoop.util.TestRunJar
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.226 sec
    [junit] Running org.apache.hadoop.util.TestShell
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 4.281 sec
    [junit] Running org.apache.hadoop.util.TestStringUtils
    [junit] Tests run: 8, Failures: 0, Errors: 0, Time elapsed: 0.187 sec

checkfailure:

injectfaults:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi>

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-init-dirs:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/lib>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/report>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/maven>

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:

ivy-retrieve-common:
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivysettings.xml>

init:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/src>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/webapps>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/classes>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/extraconf>
    [touch] Creating /tmp/null948194020
   [delete] Deleting: /tmp/null948194020
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf>
     [copy] Copying 5 files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/core-site.xml.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/core-site.xml>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/masters.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/masters>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/hadoop-env.sh.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/hadoop-env.sh>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/slaves.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/slaves>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/hadoop-policy.xml.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/hadoop-policy.xml>

record-parser:

compile-rcc-compiler:
    [javac] Compiling 29 source files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
Trying to override old definition of task recordcc

compile-core-classes:
    [javac] Compiling 393 source files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:31: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Config;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:32: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.KrbException;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:81: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]   private static Config kerbConf;
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:39: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.jgss.krb5.Krb5Util;
    [javac]                              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:40: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Credentials;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:41: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.PrincipalName;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:85: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]       kerbConf = Config.getInstance();
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:87: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac]     } catch (KrbException ke) {
    [javac]              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:120: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]     Credentials serviceCred = null;
    [javac]     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]       ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]                                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:123: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]           PrincipalName.KRB_NT_SRV_HST);
    [javac]           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:125: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]           .toString(), Krb5Util.ticketToCreds(getTgtFromSubject()));
    [javac]                        ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:124: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]       serviceCred = Credentials.acquireServiceCreds(principal
    [javac]                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:134: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]         .add(Krb5Util.credsToTicket(serviceCred));
    [javac]              ^
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] 15 warnings
     [copy] Copying 1 file to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>

ivy-resolve-test:

ivy-retrieve-test:

generate-test-records:

generate-avro-records:

BUILD FAILED
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:769: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/test/aop/build/aop.xml>:119: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/test/aop/build/aop.xml>:147: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:477: taskdef class org.apache.avro.specific.SchemaTask cannot be found

Total time: 13 minutes 50 seconds
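
This run compiles the core classes but dies at the same root cause as #438: build.xml line 477 defines the Avro schema-compiler task, and the jar that provides org.apache.avro.specific.SchemaTask is not on the taskdef classpath. A sketch of what such a taskdef looks like (the task name and classpath refid are assumptions; only the class name comes from the error):

    <!-- build.xml sketch: Ant taskdef for the Avro 1.3 schema compiler.
         "taskdef class ... cannot be found" means avro-*.jar did not
         resolve into the referenced classpath. -->
    <taskdef name="schema"
             classname="org.apache.avro.specific.SchemaTask">
      <classpath refid="classpath"/>
    </taskdef>

So the fix is the same as for the compile failure in #438: restore the Avro dependency, not a change to aop.xml.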
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure


Build failed in Hudson: Hadoop-Common-trunk-Commit #440

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/440/changes>

Changes:

[nigel] Fix bug in tar file name

------------------------------------------
[...truncated 2993 lines...]
     [exec]   
     [exec] ------------------------------------------------------------------------ 
     [exec] cocoon 2.2.0-dev
     [exec] Copyright (c) 1999-2005 Apache Software Foundation. All rights reserved.
     [exec] Build: December 8 2005 (TargetVM=1.4, SourceVM=1.4, Debug=on, Optimize=on)
     [exec] ------------------------------------------------------------------------ 
     [exec] 
     [exec] 
     [exec] * [1/29]    [29/29]   3.218s 9.4Kb   linkmap.html
     [exec] * [3/27]    [0/0]     0.08s  3.9Kb   skin/profile.css
     [exec] * [4/26]    [0/0]     0.227s 1.2Kb   skin/print.css
     [exec] * [5/25]    [0/0]     0.37s  2.9Kb   skin/basic.css
     [exec] * [6/25]    [1/29]    0.865s 13.9Kb  native_libraries.html
     [exec] * [7/28]    [4/31]    0.46s  7.9Kb   hod.html
     [exec] * [8/27]    [0/0]     1.76s  86.3Kb  native_libraries.pdf
     [exec] * [10/26]   [1/29]    0.576s 39.5Kb  streaming.html
     [exec] * [11/26]   [1/28]    0.224s 8.3Kb   hdfs_quota_admin_guide.html
     [exec] * [12/25]   [0/0]     0.571s 26.6Kb  linkmap.pdf
     [exec] * [13/25]   [1/28]    0.228s 21.1Kb  distcp.html
     [exec] * [14/24]   [0/0]     0.466s 130.8Kb distcp.pdf
     [exec] * [15/24]   [1/28]    0.258s 9.4Kb   hadoop_archives.html
     [exec] * [16/23]   [0/0]     0.234s 51.4Kb  hadoop_archives.pdf
     [exec] * [18/22]   [1/30]    0.379s 27.4Kb  cluster_setup.html
     [exec] * [19/21]   [0/0]     0.544s 140.3Kb cluster_setup.pdf
     [exec] * [20/20]   [0/0]     0.313s 47.2Kb  hdfs_quota_admin_guide.pdf
     [exec] * [21/21]   [2/30]    0.255s 7.3Kb   index.html
     [exec] * [22/20]   [0/0]     0.339s 28.2Kb  index.pdf
     [exec] * [23/19]   [0/0]     0.0050s 1.8Kb   images/built-with-forrest-button.png
     [exec] * [24/21]   [3/31]    0.306s 38.0Kb  hdfs_design.html
     [exec] * [25/20]   [0/0]     0.0090s 17.2Kb  images/hdfsarchitecture.gif
     [exec] * [26/19]   [0/0]     0.0090s 9.2Kb   images/hadoop-logo.jpg
     [exec] * [27/19]   [1/28]    0.301s 22.3Kb  hdfs_shell.html
     [exec] * [28/18]   [0/0]     0.397s 99.3Kb  hdfs_shell.pdf
     [exec] * [29/17]   [0/0]     0.547s 348b    skin/images/rc-b-l-15-1body-2menu-3menu.png
     [exec] * [30/17]   [1/34]    0.243s 24.9Kb  hod_admin_guide.html
     [exec] * [31/16]   [0/0]     0.433s 174.4Kb hod_admin_guide.pdf
     [exec] * [32/15]   [0/0]     0.0080s 6.5Kb   images/core-logo.gif
     [exec] * [33/15]   [1/28]    0.195s 18.2Kb  hdfs_permissions_guide.html
     [exec] * [34/14]   [0/0]     0.334s 129.4Kb hdfs_permissions_guide.pdf
     [exec] * [35/13]   [0/0]     1.691s 280.5Kb hdfs_design.pdf
     [exec] * [36/13]   [1/30]    0.184s 17.8Kb  hod_config_guide.html
     [exec] * [37/12]   [0/0]     0.32s  122.2Kb hod_config_guide.pdf
     [exec] * [38/12]   [1/36]    0.955s 129.7Kb mapred_tutorial.html
     [exec] * [39/11]   [0/0]     2.948s 342.3Kb mapred_tutorial.pdf
     [exec] * [40/10]   [0/0]     0.011s 15.6Kb  images/hdfsdatanodes.gif
     [exec] * [42/9]    [1/31]    0.406s 61.5Kb  hod_user_guide.html
     [exec] * [43/21]   [13/13]   0.071s 12.4Kb  skin/screen.css
     [exec] * [46/18]   [0/0]     0.01s  199b    skin/images/rc-t-l-5-1header-2tab-unselected-3tab-unselected.png
     [exec] * [47/17]   [0/0]     0.01s  214b    skin/images/rc-t-r-5-1header-2tab-unselected-3tab-unselected.png
     [exec] * [48/16]   [0/0]     0.01s  199b    skin/images/rc-t-l-5-1header-2searchbox-3searchbox.png
     [exec] * [49/15]   [0/0]     0.011s 390b    skin/images/rc-t-r-15-1body-2menu-3menu.png
     [exec] * [51/13]   [0/0]     0.01s  215b    skin/images/rc-t-r-5-1header-2tab-selected-3tab-selected.png
     [exec] * [52/12]   [0/0]     0.01s  214b    skin/images/rc-t-r-5-1header-2searchbox-3searchbox.png
     [exec] * [53/11]   [0/0]     0.02s  209b    skin/images/rc-t-l-5-1header-2tab-selected-3tab-selected.png
     [exec] * [54/10]   [0/0]     0.0030s 285b    images/instruction_arrow.png
     [exec] * [55/9]    [0/0]     0.01s  200b    skin/images/rc-b-r-5-1header-2tab-selected-3tab-selected.png
     [exec] * [56/9]    [1/32]    0.231s 29.4Kb  hdfs_user_guide.html
     [exec] * [57/8]    [0/0]     0.362s 189.6Kb hdfs_user_guide.pdf
     [exec] * [58/7]    [0/0]     0.895s 237.1Kb hod_user_guide.pdf
     [exec] * [59/6]    [0/0]     0.461s 171.5Kb streaming.pdf
     [exec] * [60/5]    [0/0]     0.215s 54.2Kb  hod.pdf
     [exec] * [62/3]    [0/0]     0.0080s 766b    images/favicon.ico
     [exec] * [63/2]    [0/0]     0.011s 319b    skin/images/rc-b-r-15-1body-2menu-3menu.png
     [exec] * [64/2]    [1/28]    0.187s 16.0Kb  quickstart.html
     [exec] * [65/1]    [0/0]     0.329s 105.1Kb quickstart.pdf
     [exec] * [66/1]    [1/48]    0.3s   35.1Kb  commands_manual.html
     [exec] * [67/0]    [0/0]     0.614s 153.3Kb commands_manual.pdf
     [exec] Total time: 0 minutes 27 seconds,  Site size: 3,259,568 Site pages: 59
     [exec] 
     [exec]   Copying broken links file to site root.
     [exec]       
     [exec] Copying 1 file to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/docs/cn/build/site>
     [exec] 
     [exec] -----------------------------
     [exec] Static site was successfully generated at:
     [exec] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/docs/cn/build/site>
     [exec] ------------------------------
     [exec]     
     [exec] 
     [exec] BUILD SUCCESSFUL
     [exec] Total time: 31 seconds
     [copy] Copying 107 files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/docs/cn>
     [copy] Copied 7 empty directories to 3 empty directories under <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/docs/cn>
    [style] Warning: the task name <style> is deprecated. Use <xslt> instead.
    [style] Transforming into <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/docs/cn>
    [style] Processing <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/core-default.xml> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/docs/cn/core-default.html>
    [style] Loading stylesheet <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/configuration.xsl>

changes-to-html:

ivy-resolve-jdiff:

ivy-retrieve-jdiff:

write-null:

api-xml:
  [javadoc] Generating Javadoc
  [javadoc] Javadoc execution
  [javadoc] Loading source files for package org.apache.hadoop...
  [javadoc] Loading source files for package org.apache.hadoop.classification...
  [javadoc] Loading source files for package org.apache.hadoop.classification.tools...
  [javadoc] Loading source files for package org.apache.hadoop.conf...
  [javadoc] Loading source files for package org.apache.hadoop.fs...
  [javadoc] Loading source files for package org.apache.hadoop.fs.ftp...
  [javadoc] Loading source files for package org.apache.hadoop.fs.kfs...
  [javadoc] Loading source files for package org.apache.hadoop.fs.local...
  [javadoc] Loading source files for package org.apache.hadoop.fs.permission...
  [javadoc] Loading source files for package org.apache.hadoop.fs.s3...
  [javadoc] Loading source files for package org.apache.hadoop.fs.s3native...
  [javadoc] Loading source files for package org.apache.hadoop.fs.shell...
  [javadoc] Loading source files for package org.apache.hadoop.http...
  [javadoc] Loading source files for package org.apache.hadoop.io...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress.bzip2...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress.zlib...
  [javadoc] Loading source files for package org.apache.hadoop.io.file.tfile...
  [javadoc] Loading source files for package org.apache.hadoop.io.retry...
  [javadoc] Loading source files for package org.apache.hadoop.io.serializer...
  [javadoc] Loading source files for package org.apache.hadoop.io.serializer.avro...
  [javadoc] Loading source files for package org.apache.hadoop.ipc...
  [javadoc] Loading source files for package org.apache.hadoop.ipc.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.log...
  [javadoc] Loading source files for package org.apache.hadoop.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.file...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.ganglia...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.jvm...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.spi...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.util...
  [javadoc] Loading source files for package org.apache.hadoop.net...
  [javadoc] Loading source files for package org.apache.hadoop.record...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler.ant...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler.generated...
  [javadoc] Loading source files for package org.apache.hadoop.record.meta...
  [javadoc] Loading source files for package org.apache.hadoop.security...
  [javadoc] Loading source files for package org.apache.hadoop.security.authorize...
  [javadoc] Loading source files for package org.apache.hadoop.security.token...
  [javadoc] Loading source files for package org.apache.hadoop.security.token.delegation...
  [javadoc] Loading source files for package org.apache.hadoop.util...
  [javadoc] Loading source files for package org.apache.hadoop.util.bloom...
  [javadoc] Loading source files for package org.apache.hadoop.util.hash...
  [javadoc] Constructing Javadoc information...
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:31: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
  [javadoc] import sun.security.krb5.Config;
  [javadoc]                         ^
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:32: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
  [javadoc] import sun.security.krb5.KrbException;
  [javadoc]                         ^
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:81: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
  [javadoc]   private static Config kerbConf;
  [javadoc]                  ^
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:39: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
  [javadoc] import sun.security.jgss.krb5.Krb5Util;
  [javadoc]                              ^
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:40: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
  [javadoc] import sun.security.krb5.Credentials;
  [javadoc]                         ^
  [javadoc] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:41: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
  [javadoc] import sun.security.krb5.PrincipalName;
  [javadoc]                         ^
  [javadoc] ExcludePrivateAnnotationsJDiffDoclet
  [javadoc] JDiff: doclet started ...
  [javadoc] JDiff: writing the API to file '<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/lib/jdiff/hadoop-core_0.23.0-SNAPSHOT.xml>'...
  [javadoc] JDiff: finished (took 0s, not including scanning the source files).
  [javadoc] 6 warnings

api-report:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/docs/jdiff>
  [javadoc] Generating Javadoc
  [javadoc] Javadoc execution
  [javadoc] javadoc: error - Illegal package name: ""
  [javadoc] Loading source file <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/ivy/lib/Hadoop-Common/jdiff/Null.java>...
  [javadoc] Loading source files for package org.apache.hadoop...
  [javadoc] Loading source files for package org.apache.hadoop.classification...
  [javadoc] Loading source files for package org.apache.hadoop.classification.tools...
  [javadoc] Loading source files for package org.apache.hadoop.conf...
  [javadoc] Loading source files for package org.apache.hadoop.fs...
  [javadoc] Loading source files for package org.apache.hadoop.fs.ftp...
  [javadoc] Loading source files for package org.apache.hadoop.fs.kfs...
  [javadoc] Loading source files for package org.apache.hadoop.fs.local...
  [javadoc] Loading source files for package org.apache.hadoop.fs.permission...
  [javadoc] Loading source files for package org.apache.hadoop.fs.s3...
  [javadoc] Loading source files for package org.apache.hadoop.fs.s3native...
  [javadoc] Loading source files for package org.apache.hadoop.fs.shell...
  [javadoc] Loading source files for package org.apache.hadoop.http...
  [javadoc] Loading source files for package org.apache.hadoop.io...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress.bzip2...
  [javadoc] Loading source files for package org.apache.hadoop.io.compress.zlib...
  [javadoc] Loading source files for package org.apache.hadoop.io.file.tfile...
  [javadoc] Loading source files for package org.apache.hadoop.io.retry...
  [javadoc] Loading source files for package org.apache.hadoop.io.serializer...
  [javadoc] Loading source files for package org.apache.hadoop.io.serializer.avro...
  [javadoc] Loading source files for package org.apache.hadoop.ipc...
  [javadoc] Loading source files for package org.apache.hadoop.ipc.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.log...
  [javadoc] Loading source files for package org.apache.hadoop.metrics...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.file...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.ganglia...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.jvm...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.spi...
  [javadoc] Loading source files for package org.apache.hadoop.metrics.util...
  [javadoc] Loading source files for package org.apache.hadoop.net...
  [javadoc] Loading source files for package org.apache.hadoop.record...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler.ant...
  [javadoc] Loading source files for package org.apache.hadoop.record.compiler.generated...
  [javadoc] Loading source files for package org.apache.hadoop.record.meta...
  [javadoc] Loading source files for package org.apache.hadoop.security...
  [javadoc] Loading source files for package org.apache.hadoop.security.authorize...
  [javadoc] Loading source files for package org.apache.hadoop.security.token...
  [javadoc] Loading source files for package org.apache.hadoop.security.token.delegation...
  [javadoc] Loading source files for package org.apache.hadoop.util...
  [javadoc] Loading source files for package org.apache.hadoop.util.bloom...
  [javadoc] Loading source files for package org.apache.hadoop.util.hash...
  [javadoc] 1 error
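
The javadoc failure above ('error - Illegal package name: ""') is javadoc being handed an empty string as a package argument; with Ant that typically means a comma-separated package list expanded with an empty token, for example an unset property or a trailing comma. A sketch of that failure mode (the property and attribute values here are hypothetical, for illustration only):

    <!-- build.xml sketch: if ${jdiff.stable} expands to nothing, javadoc
         receives "" as a package name and reports
         "javadoc: error - Illegal package name" -->
    <javadoc destdir="${jdiff.xml.dir}"
             packagenames="org.apache.hadoop.*,${jdiff.stable}">
      <classpath refid="classpath"/>
    </javadoc>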

ivy-resolve-test:

ivy-retrieve-test:

generate-test-records:

generate-avro-records:

BUILD FAILED
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:477: taskdef class org.apache.avro.specific.SchemaTask cannot be found

Total time: 3 minutes 5 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/*.tar.gz': No such file or directory
mv: cannot stat `build/test/findbugs': No such file or directory
Build Failed
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure


Build failed in Hudson: Hadoop-Common-trunk-Commit #439

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/439/>

------------------------------------------
[...truncated 21280 lines...]
    [junit] 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
    [junit] 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    [junit] 	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
    [junit] 	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
    [junit] 	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
    [junit] 	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
    [junit] 	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] checkDir success: false
    [junit] org.apache.hadoop.util.DiskChecker$DiskErrorException: directory is not listable: Mock for Path, hashCode: 7812797
    [junit] 	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:166)
    [junit] 	at org.apache.hadoop.util.TestDiskChecker._checkDirs(TestDiskChecker.java:114)
    [junit] 	at org.apache.hadoop.util.TestDiskChecker.__CLR3_0_27686ui10fd(TestDiskChecker.java:98)
    [junit] 	at org.apache.hadoop.util.TestDiskChecker.testCheckDir_notListable(TestDiskChecker.java:97)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    [junit] 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    [junit] 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    [junit] 	at java.lang.reflect.Method.invoke(Method.java:597)
    [junit] 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
    [junit] 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    [junit] 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
    [junit] 	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
    [junit] 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:76)
    [junit] 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    [junit] 	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
    [junit] 	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
    [junit] 	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
    [junit] 	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
    [junit] 	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
    [junit] 	at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] checkDir success: false
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 0.538 sec
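
For context on the TestDiskChecker output above: DiskChecker.checkDir verifies that a directory exists (creating it if necessary) and is readable, writable, and listable, throwing DiskChecker.DiskErrorException otherwise; the "checkDir success: false" lines are the test confirming that failure path. A minimal sketch of the File-based variant (the directory path here is hypothetical):

    import java.io.File;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class DiskCheckExample {
        public static void main(String[] args) {
            try {
                // Checks existence (mkdirs if absent), readability,
                // writability, and listability of the directory.
                DiskChecker.checkDir(new File("/tmp/disk-check-demo"));
                System.out.println("checkDir success: true");
            } catch (DiskErrorException e) {
                System.out.println("checkDir success: false: " + e.getMessage());
            }
        }
    }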
    [junit] Running org.apache.hadoop.util.TestGenericOptionsParser
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.658 sec
    [junit] Running org.apache.hadoop.util.TestGenericsUtil
    [junit] 2010-11-20 07:00:13,922 WARN  util.GenericOptionsParser (GenericOptionsParser.java:parseGeneralOptions(417)) - options parsing failed: Missing argument for option: jt
    [junit] usage: general options are:
    [junit]  -archives <paths>              comma separated archives to be unarchived
    [junit]                                 on the compute machines.
    [junit]  -conf <configuration file>     specify an application configuration file
    [junit]  -D <property=value>            use value for given property
    [junit]  -files <paths>                 comma separated files to be copied to the
    [junit]                                 map reduce cluster
    [junit]  -fs <local|namenode:port>      specify a namenode
    [junit]  -jt <local|jobtracker:port>    specify a job tracker
    [junit]  -libjars <paths>               comma separated jar files to include in
    [junit]                                 the classpath.
    [junit]  -tokenCacheFile <tokensFile>   name of the file with the tokens
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.286 sec
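
The usage text above is printed by GenericOptionsParser when an option is malformed (here, -jt with no argument). In a driver, the parser folds the general options into the Configuration and hands back the leftover application arguments; a minimal sketch, assuming the standard constructor and getRemainingArgs():

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class DriverSkeleton {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Consumes -D/-conf/-fs/-jt/-files/-libjars/-archives/-tokenCacheFile
            // and applies them to conf; returns whatever arguments remain.
            String[] remaining =
                new GenericOptionsParser(conf, args).getRemainingArgs();
            System.out.println("fs.default.name = " + conf.get("fs.default.name"));
            for (String arg : remaining) {
                System.out.println("app arg: " + arg);
            }
        }
    }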
    [junit] Running org.apache.hadoop.util.TestHostsFileReader
    [junit] 2010-11-20 07:00:14,604 INFO  util.HostsFileReader (HostsFileReader.java:refresh(85)) - Refreshing hosts (include/exclude) list
    [junit] 2010-11-20 07:00:14,607 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost1 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,608 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,608 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,609 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,610 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,611 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost5 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,612 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost1 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,617 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,617 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,618 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,618 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,619 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost5 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,627 INFO  util.HostsFileReader (HostsFileReader.java:refresh(85)) - Refreshing hosts (include/exclude) list
    [junit] 2010-11-20 07:00:14,630 INFO  util.HostsFileReader (HostsFileReader.java:refresh(85)) - Refreshing hosts (include/exclude) list
    [junit] 2010-11-20 07:00:14,634 INFO  util.HostsFileReader (HostsFileReader.java:refresh(85)) - Refreshing hosts (include/exclude) list
    [junit] 2010-11-20 07:00:14,635 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,635 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,636 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,636 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,637 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,637 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,640 INFO  util.HostsFileReader (HostsFileReader.java:refresh(85)) - Refreshing hosts (include/exclude) list
    [junit] 2010-11-20 07:00:14,641 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,642 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,642 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,643 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.include>
    [junit] 2010-11-20 07:00:14,643 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,644 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost2 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,645 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost4 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] 2010-11-20 07:00:14,645 INFO  util.HostsFileReader (HostsFileReader.java:readFileToSet(70)) - Adding somehost3 to the list of hosts from <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build/test/data/dfs.exclude>
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 0.276 sec
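
The TestHostsFileReader log above shows HostsFileReader loading an include list and an exclude list and re-reading both on refresh(). A minimal sketch of that API (the /tmp file names are assumptions for illustration):

    import java.util.Set;
    import org.apache.hadoop.util.HostsFileReader;

    public class HostsReaderExample {
        public static void main(String[] args) throws Exception {
            // Reads one hostname per line from each file at construction time.
            HostsFileReader reader =
                new HostsFileReader("/tmp/dfs.include", "/tmp/dfs.exclude");
            Set<String> included = reader.getHosts();
            Set<String> excluded = reader.getExcludedHosts();
            System.out.println("included: " + included);
            System.out.println("excluded: " + excluded);
            reader.refresh(); // re-reads both files, as in the log lines above
        }
    }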
    [junit] Running org.apache.hadoop.util.TestIndexedSort
    [junit] sortRandom seed: -3707164321268851267(org.apache.hadoop.util.QuickSort)
    [junit] testSorted seed: 8581036554391073056(org.apache.hadoop.util.QuickSort)
    [junit] testAllEqual setting min/max at 413/209(org.apache.hadoop.util.QuickSort)
    [junit] sortWritable seed: 1358275815979569019(org.apache.hadoop.util.QuickSort)
    [junit] QuickSort degen cmp/swp: 23252/3713(org.apache.hadoop.util.QuickSort)
    [junit] sortRandom seed: -2015333444816143551(org.apache.hadoop.util.HeapSort)
    [junit] testSorted seed: 5358884452116746921(org.apache.hadoop.util.HeapSort)
    [junit] testAllEqual setting min/max at 288/426(org.apache.hadoop.util.HeapSort)
    [junit] sortWritable seed: 6936118650852971527(org.apache.hadoop.util.HeapSort)
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 1.51 sec
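
TestIndexedSort exercises QuickSort and HeapSort through the IndexedSortable interface, which exposes a collection purely as compare(i, j) and swap(i, j) so the sorter never touches the underlying storage. A minimal sketch over an int array:

    import java.util.Arrays;
    import org.apache.hadoop.util.IndexedSortable;
    import org.apache.hadoop.util.QuickSort;

    public class IndexedSortExample {
        public static void main(String[] args) {
            final int[] values = {5, 3, 8, 1, 9, 2};
            IndexedSortable sortable = new IndexedSortable() {
                public int compare(int i, int j) { return values[i] - values[j]; }
                public void swap(int i, int j) {
                    int tmp = values[i]; values[i] = values[j]; values[j] = tmp;
                }
            };
            new QuickSort().sort(sortable, 0, values.length); // [left, right)
            System.out.println(Arrays.toString(values)); // [1, 2, 3, 5, 8, 9]
        }
    }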
    [junit] Running org.apache.hadoop.util.TestOptions
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.126 sec
    [junit] Running org.apache.hadoop.util.TestPureJavaCrc32
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 1.541 sec
    [junit] Running org.apache.hadoop.util.TestReflectionUtils
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 0.652 sec
    [junit] Running org.apache.hadoop.util.TestRunJar
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.23 sec
    [junit] Running org.apache.hadoop.util.TestShell
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 4.323 sec
    [junit] Running org.apache.hadoop.util.TestStringUtils
    [junit] Tests run: 8, Failures: 0, Errors: 0, Time elapsed: 0.189 sec

checkfailure:

injectfaults:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi>

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-init-dirs:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/lib>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/report>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/ivy/maven>

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivysettings.xml>

ivy-resolve-common:
[ivy:resolve] 
[ivy:resolve] :: problems summary ::
[ivy:resolve] :::: ERRORS
[ivy:resolve] 	unknown resolver null
[ivy:resolve] 	unknown resolver null
[ivy:resolve] 
[ivy:resolve] :: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS

ivy-retrieve-common:
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/ivy/ivysettings.xml>

init:
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/src>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/webapps>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/classes>
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/extraconf>
    [touch] Creating /tmp/null1599772257
   [delete] Deleting: /tmp/null1599772257
    [mkdir] Created dir: <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf>
     [copy] Copying 5 files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/core-site.xml.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/core-site.xml>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/masters.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/masters>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/hadoop-env.sh.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/hadoop-env.sh>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/slaves.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/slaves>
     [copy] Copying <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/conf/hadoop-policy.xml.template> to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/test/conf/hadoop-policy.xml>

record-parser:

compile-rcc-compiler:
    [javac] Compiling 29 source files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
Trying to override old definition of task recordcc

compile-core-classes:
    [javac] Compiling 393 source files to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:31: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Config;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:32: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.KrbException;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:81: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]   private static Config kerbConf;
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:39: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.jgss.krb5.Krb5Util;
    [javac]                              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:40: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.Credentials;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:41: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac] import sun.security.krb5.PrincipalName;
    [javac]                         ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:85: warning: sun.security.krb5.Config is Sun proprietary API and may be removed in a future release
    [javac]       kerbConf = Config.getInstance();
    [javac]                  ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/KerberosName.java>:87: warning: sun.security.krb5.KrbException is Sun proprietary API and may be removed in a future release
    [javac]     } catch (KrbException ke) {
    [javac]              ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:120: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]     Credentials serviceCred = null;
    [javac]     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]       ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:122: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]       PrincipalName principal = new PrincipalName(serviceName,
    [javac]                                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:123: warning: sun.security.krb5.PrincipalName is Sun proprietary API and may be removed in a future release
    [javac]           PrincipalName.KRB_NT_SRV_HST);
    [javac]           ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:125: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]           .toString(), Krb5Util.ticketToCreds(getTgtFromSubject()));
    [javac]                        ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:124: warning: sun.security.krb5.Credentials is Sun proprietary API and may be removed in a future release
    [javac]       serviceCred = Credentials.acquireServiceCreds(principal
    [javac]                     ^
    [javac] <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/java/org/apache/hadoop/security/SecurityUtil.java>:134: warning: sun.security.jgss.krb5.Krb5Util is Sun proprietary API and may be removed in a future release
    [javac]         .add(Krb5Util.credsToTicket(serviceCred));
    [javac]              ^
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] 15 warnings
     [copy] Copying 1 file to <https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build-fi/classes>
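
The 15 warnings above all come from compiling directly against sun.security.* classes, which javac flags as proprietary API. One way to avoid them would be to load those classes reflectively so the compiler never sees the sun.* types; this is only an illustrative alternative, not what the Hadoop source here does (it imports the classes directly, as the warnings show):

    public class KrbConfigProbe {
        public static void main(String[] args) {
            try {
                // Reflective equivalent of Config.getInstance() from the
                // warning at KerberosName.java:85; no sun.* import needed.
                Class<?> cfg = Class.forName("sun.security.krb5.Config");
                Object instance = cfg.getMethod("getInstance").invoke(null);
                System.out.println("Kerberos config loaded: " + instance);
            } catch (Exception e) {
                System.out.println("sun.security.krb5.Config unavailable: " + e);
            }
        }
    }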

ivy-resolve-test:

ivy-retrieve-test:

generate-test-records:

generate-avro-records:

BUILD FAILED
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:769: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/test/aop/build/aop.xml>:119: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/src/test/aop/build/aop.xml>:147: The following error occurred while executing this line:
<https://hudson.apache.org/hudson/job/Hadoop-Common-trunk-Commit/ws/trunk/build.xml>:477: taskdef class org.apache.avro.specific.SchemaTask cannot be found

Total time: 14 minutes 57 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure