Posted to issues@spark.apache.org by "Littlestar (JIRA)" <ji...@apache.org> on 2015/04/28 12:52:05 UTC

[jira] [Comment Edited] (SPARK-7193) "Spark on Mesos" may need more tests for spark 1.3.1 release

    [ https://issues.apache.org/jira/browse/SPARK-7193?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14516801#comment-14516801 ] 

Littlestar edited comment on SPARK-7193 at 4/28/15 10:51 AM:
-------------------------------------------------------------

Reproduced on 1 master + 7 nodes (Spark 1.3.1 + Mesos 0.22.0/0.22.1):

{noformat}
15/04/28 18:45:53 INFO spark.SparkContext: Running Spark version 1.3.1

Spark context available as sc.
15/04/28 18:45:57 INFO repl.SparkILoop: Created sql context (with Hive support)..
SQL context available as sqlContext.

scala> val data = Array(1, 2, 3, 4, 5) 
data: Array[Int] = Array(1, 2, 3, 4, 5)

scala> val distData = sc.parallelize(data)
distData: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[0] at parallelize at <console>:23

scala> distData.reduce(_+_) 
-----------
org.apache.spark.SparkException: Job aborted due to stage failure: Task 7 in stage 0.0 failed 4 times, most recent failure: Lost task 7.3 in stage 0.0 (TID 17, hpblade06): ExecutorLostFailure (executor 20150427-165835-1214949568-5050-11116-S0 lost)
Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1204)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1193)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1192)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1192)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:693)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:693)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:693)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1393)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1354)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)


{noformat}
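For what it's worth, an ExecutorLostFailure like the one above often shows up when the Mesos executors run a different Spark build than the driver. Below is a minimal sketch of pointing the executors at a matching 1.3.1 bundle via spark.executor.uri; the HDFS path and app name are placeholders, adjust them for your cluster:

{noformat}
// Hypothetical driver-side setup; the tarball path is an assumption and
// must point at the same Spark 1.3.1 build the driver itself runs.
import org.apache.spark.{SparkConf, SparkContext}

val conf = new SparkConf()
  .setAppName("mesos-smoke-test")
  .setMaster("mesos://host:5050")
  // Each Mesos executor downloads and unpacks this archive; a version
  // mismatch here is a classic cause of deserialization failures on workers.
  .set("spark.executor.uri", "hdfs:///tmp/spark-1.3.1-bin-hadoop2.4.tgz")

val sc = new SparkContext(conf)
sc.parallelize(1 to 5).reduce(_ + _)   // same job that failed above
{noformat}

The equivalent spark-defaults.conf line is "spark.executor.uri hdfs:///tmp/spark-1.3.1-bin-hadoop2.4.tgz". Checking the lost executor's sandbox stderr on the slave (hpblade06 here) should confirm or rule this out.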


> "Spark on Mesos" may need more tests for spark 1.3.1 release
> ------------------------------------------------------------
>
>                 Key: SPARK-7193
>                 URL: https://issues.apache.org/jira/browse/SPARK-7193
>             Project: Spark
>          Issue Type: Bug
>          Components: Mesos
>    Affects Versions: 1.3.1
>            Reporter: Littlestar
>
> "Spark on Mesos" may need more tests for spark 1.3.1 release
> http://spark.apache.org/docs/latest/running-on-mesos.html
> I tested Mesos 0.21.1, 0.22.0, and 0.22.1 RC4.
> It works fine with just "./bin/spark-shell --master mesos://host:5050".
> But any task that needs more than one node fails with the following exceptions (see also the note after the log):
> {noformat}
> Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 10 in stage 0.0 failed 4 times, most recent failure: Lost task 10.3 in stage 0.0 (TID 127, hpblade05): java.lang.IllegalStateException: unread block data
> 	at java.io.ObjectInputStream$BlockDataInputStream.setBlockDataMode(ObjectInputStream.java:2393)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1378)
> 	at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1963)
> 	at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1887)
> 	at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1770)
> 	at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1346)
> 	at java.io.ObjectInputStream.readObject(ObjectInputStream.java:368)
> 	at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:68)
> 	at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:94)
> 	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:185)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
> 	at java.lang.Thread.run(Thread.java:679)
> Driver stacktrace:
> 	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1204)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1193)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1192)
> 	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> 	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
> 	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1192)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:693)
> 	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:693)
> 	at scala.Option.foreach(Option.scala:236)
> 	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:693)
> 	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1393)
> 	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1354)
> 	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
> 15/04/28 15:33:18 ERROR scheduler.LiveListenerBus: Listener EventLoggingListener threw an exception
> java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at org.apache.spark.scheduler.EventLoggingListener$$anonfun$logEvent$3.apply(EventLoggingListener.scala:144)
> 	at org.apache.spark.scheduler.EventLoggingListener$$anonfun$logEvent$3.apply(EventLoggingListener.scala:144)
> 	at scala.Option.foreach(Option.scala:236)
> 	at org.apache.spark.scheduler.EventLoggingListener.logEvent(EventLoggingListener.scala:144)
> 	at org.apache.spark.scheduler.EventLoggingListener.onStageCompleted(EventLoggingListener.scala:165)
> 	at org.apache.spark.scheduler.SparkListenerBus$class.onPostEvent(SparkListenerBus.scala:32)
> 	at org.apache.spark.scheduler.LiveListenerBus.onPostEvent(LiveListenerBus.scala:31)
> 	at org.apache.spark.scheduler.LiveListenerBus.onPostEvent(LiveListenerBus.scala:31)
> 	at org.apache.spark.util.ListenerBus$class.postToAll(ListenerBus.scala:53)
> 	at org.apache.spark.util.AsynchronousListenerBus.postToAll(AsynchronousListenerBus.scala:36)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply$mcV$sp(AsynchronousListenerBus.scala:76)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply(AsynchronousListenerBus.scala:61)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply(AsynchronousListenerBus.scala:61)
> 	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1618)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1.run(AsynchronousListenerBus.scala:60)
> Caused by: java.io.IOException: Filesystem closed
> 	at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:707)
> 	at org.apache.hadoop.hdfs.DFSOutputStream.flushOrSync(DFSOutputStream.java:1843)
> 	at org.apache.hadoop.hdfs.DFSOutputStream.hflush(DFSOutputStream.java:1804)
> 	at org.apache.hadoop.fs.FSDataOutputStream.hflush(FSDataOutputStream.java:127)
> 	... 18 more
> 15/04/28 15:33:18 ERROR scheduler.LiveListenerBus: Listener EventLoggingListener threw an exception
> java.lang.reflect.InvocationTargetException
> 	at sun.reflect.GeneratedMethodAccessor3.invoke(Unknown Source)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:606)
> 	at org.apache.spark.scheduler.EventLoggingListener$$anonfun$logEvent$3.apply(EventLoggingListener.scala:144)
> 	at org.apache.spark.scheduler.EventLoggingListener$$anonfun$logEvent$3.apply(EventLoggingListener.scala:144)
> 	at scala.Option.foreach(Option.scala:236)
> 	at org.apache.spark.scheduler.EventLoggingListener.logEvent(EventLoggingListener.scala:144)
> 	at org.apache.spark.scheduler.EventLoggingListener.onJobEnd(EventLoggingListener.scala:169)
> 	at org.apache.spark.scheduler.SparkListenerBus$class.onPostEvent(SparkListenerBus.scala:36)
> 	at org.apache.spark.scheduler.LiveListenerBus.onPostEvent(LiveListenerBus.scala:31)
> 	at org.apache.spark.scheduler.LiveListenerBus.onPostEvent(LiveListenerBus.scala:31)
> 	at org.apache.spark.util.ListenerBus$class.postToAll(ListenerBus.scala:53)
> 	at org.apache.spark.util.AsynchronousListenerBus.postToAll(AsynchronousListenerBus.scala:36)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply$mcV$sp(AsynchronousListenerBus.scala:76)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply(AsynchronousListenerBus.scala:61)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1$$anonfun$run$1.apply(AsynchronousListenerBus.scala:61)
> 	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1618)
> 	at org.apache.spark.util.AsynchronousListenerBus$$anon$1.run(AsynchronousListenerBus.scala:60)
> Caused by: java.io.IOException: Filesystem closed
> 	at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:707)
> 	at org.apache.hadoop.hdfs.DFSOutputStream.flushOrSync(DFSOutputStream.java:1843)
> 	at org.apache.hadoop.hdfs.DFSOutputStream.hflush(DFSOutputStream.java:1804)
> 	at org.apache.hadoop.fs.FSDataOutputStream.hflush(FSDataOutputStream.java:127)
> 	... 18 more
> {noformat}
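A note on the two traces quoted above: the java.lang.IllegalStateException: unread block data is the same class of failure as in my comment (in my experience it usually means the classes on the executors do not match the driver's Spark build, see the sketch there), while the LiveListenerBus "Filesystem closed" errors that follow look like fallout from the aborted job tearing down the shared HDFS client rather than an independent bug. While bisecting the Mesos failure it may help to take event logging out of the picture; a sketch, assuming event logging was enabled in your setup:

{noformat}
// Hypothetical: rerun the repro with event logging disabled so the
// EventLoggingListener cannot add secondary "Filesystem closed" noise.
import org.apache.spark.SparkConf

val conf = new SparkConf()
  .setMaster("mesos://host:5050")
  .set("spark.eventLog.enabled", "false")
{noformat}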


