You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@mahout.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/02/25 18:36:19 UTC
Build failed in Jenkins: Mahout-Quality #2973
See <https://builds.apache.org/job/Mahout-Quality/2973/>
------------------------------------------
[...truncated 6237 lines...]
{
0 => {0:0.4094967325774156,1:0.16221052496996724,2:0.3176686476662248}
1 => {0:0.8871193149818618,1:0.8777194893736132,2:0.14735555020627644}
2 => {0:0.5969256651166984,1:0.6373740206383058,2:0.8178174000776084}
}
C=
{
0 => {0:1.4094967325774155,1:2.162210524969967,2:3.317668647666225}
1 => {0:3.8871193149818617,1:4.8777194893736135,2:5.147355550206276}
2 => {0:5.596925665116698,1:6.637374020638306,2:7.817817400077608}
}
- C = A + B, identically partitioned
- C = A + B side test 1
- C = A + B side test 2
- C = A + B side test 3
- Ax
- A'x
- colSums, colMeans
- rowSums, rowMeans
- A.diagv
- numNonZeroElementsPerColumn
- C = A cbind B, cogroup
- C = A cbind B, zip
- B = A + 1.0
- C = A rbind B
- C = A rbind B, with empty
- scalarOps
50533 [Executor task launch worker-2] ERROR org.apache.spark.executor.Executor - Exception in task 2.0 in stage 251.0 (TID 586)
java.io.IOException: PARSING_ERROR(2)
at org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
at org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
at org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
at org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
at org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
at org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
at sun.reflect.GeneratedMethodAccessor9.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:662)
50533 [Executor task launch worker-0] ERROR org.apache.spark.executor.Executor - Exception in task 1.0 in stage 251.0 (TID 585)
java.io.IOException: PARSING_ERROR(2)
at org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
at org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
at org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
at org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
at org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
at org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
at sun.reflect.GeneratedMethodAccessor9.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:662)
50533 [Executor task launch worker-1] ERROR org.apache.spark.executor.Executor - Exception in task 0.0 in stage 251.0 (TID 584)
java.io.IOException: PARSING_ERROR(2)
at org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
at org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
at org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
at org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
at org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
at org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
at sun.reflect.GeneratedMethodAccessor9.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:662)
50543 [Executor task launch worker-0] ERROR org.apache.spark.executor.Executor - Exception in task 3.0 in stage 251.0 (TID 587)
java.io.IOException: PARSING_ERROR(2)
at org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
at org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
at org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
at org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
at org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
at org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
at org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
at org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
at org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
at sun.reflect.GeneratedMethodAccessor9.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
at java.lang.Thread.run(Thread.java:662)
50545 [Result resolver thread-3] ERROR org.apache.spark.scheduler.TaskSetManager - Task 2 in stage 251.0 failed 1 times; aborting job
- C = A + B missing rows *** FAILED ***
 org.apache.spark.SparkException: Job aborted due to stage failure: Task 2 in stage 251.0 failed 1 times, most recent failure: Lost task 2.0 in stage 251.0 (TID 586, localhost): java.io.IOException: PARSING_ERROR(2)
 org.xerial.snappy.SnappyNative.throw_error(SnappyNative.java:78)
 org.xerial.snappy.SnappyNative.uncompressedLength(Native Method)
 org.xerial.snappy.Snappy.uncompressedLength(Snappy.java:545)
 org.xerial.snappy.SnappyInputStream.readFully(SnappyInputStream.java:125)
 org.xerial.snappy.SnappyInputStream.readHeader(SnappyInputStream.java:88)
 org.xerial.snappy.SnappyInputStream.<init>(SnappyInputStream.java:58)
 org.apache.spark.io.SnappyCompressionCodec.compressedInputStream(CompressionCodec.scala:128)
 org.apache.spark.broadcast.TorrentBroadcast$.unBlockifyObject(TorrentBroadcast.scala:232)
 org.apache.spark.broadcast.TorrentBroadcast.readObject(TorrentBroadcast.scala:169)
 sun.reflect.GeneratedMethodAccessor9.invoke(Unknown Source)
 sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
 java.lang.reflect.Method.invoke(Method.java:597)
 java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:969)
 java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1871)
 java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
 java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
 java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1969)
 java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
 java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1775)
 java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1327)
 java.io.ObjectInputStream.readObject(ObjectInputStream.java:349)
 org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
 org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
 org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:159)
 java.util.concurrent.ThreadPoolExecutor$Worker.runTask(ThreadPoolExecutor.java:895)
 java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:918)
 java.lang.Thread.run(Thread.java:662)
Driver stacktrace:
 at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
 at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
 at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
 at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
 at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
 at scala.Option.foreach(Option.scala:236)
 at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
 ...
- C = cbind(A, B) with missing rows
collected A =
{
0 => {0:1.0,1:2.0,2:3.0}
1 => {}
2 => {}
3 => {0:3.0,1:4.0,2:5.0}
}
collected B =
{
2 => {0:1.0,1:1.0,2:1.0}
1 => {0:1.0,1:1.0,2:1.0}
3 => {0:4.0,1:5.0,2:6.0}
0 => {0:2.0,1:3.0,2:4.0}
}
- B = A + 1.0 missing rows
Run completed in 2 minutes, 7 seconds.
Total number of tests run: 89
Suites: completed 12, aborted 0
Tests: succeeded 88, failed 1, canceled 0, ignored 1, pending 0
*** 1 TEST FAILED ***
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Mahout Build Tools ................................ SUCCESS [9.111s]
[INFO] Apache Mahout ..................................... SUCCESS [4.257s]
[INFO] Mahout Math ....................................... SUCCESS [2:25.354s]
[INFO] Mahout MapReduce Legacy ........................... SUCCESS [11:32.935s]
[INFO] Mahout Integration ................................ SUCCESS [1:28.622s]
[INFO] Mahout Examples ................................... SUCCESS [53.075s]
[INFO] Mahout Release Package ............................ SUCCESS [0.102s]
[INFO] Mahout Math Scala bindings ........................ SUCCESS [2:11.007s]
[INFO] Mahout Spark bindings ............................. FAILURE [2:50.523s]
[INFO] Mahout Spark bindings shell ....................... SKIPPED
[INFO] Mahout H2O backend ................................ SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 21:38.469s
[INFO] Finished at: Wed Feb 25 17:35:45 UTC 2015
[INFO] Final Memory: 82M/481M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.scalatest:scalatest-maven-plugin:1.0-M2:test (test) on project mahout-spark_2.10: There are test failures -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR] mvn <goals> -rf :mahout-spark_2.10
Build step 'Invoke top-level Maven targets' marked build as failure
[PMD] Skipping publisher since build result is FAILURE
[TASKS] Skipping publisher since build result is FAILURE
Archiving artifacts
Sending artifact delta relative to Mahout-Quality #2972
Archived 72 artifacts
Archive block size is 32768
Received 3681 blocks and 19906352 bytes
Compression is 85.8%
Took 11 sec
Recording test results
Publishing Javadoc
Jenkins build is back to normal : Mahout-Quality #2974
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Mahout-Quality/2974/>