Posted to issues@spark.apache.org by "The sea (JIRA)" <ji...@apache.org> on 2016/04/09 10:41:25 UTC

[jira] [Updated] (SPARK-14505) Creating two SparkContext objects in the same JVM makes the first one unusable

     [ https://issues.apache.org/jira/browse/SPARK-14505?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

The sea updated SPARK-14505:
----------------------------
    Description: 
Execute the code below in the Spark shell:

{code:scala}
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "app")

sc.range(1, 10).reduce(_ + _)

{code}

16/04/09 15:40:01 WARN scheduler.TaskSetManager: Lost task 1.0 in stage 1.0 (TID 3, 192.168.172.131): java.io.IOException: org.apache.spark.SparkException: Failed to get broadcast_1_piece0 of broadcast_1
	at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1222)
	at org.apache.spark.broadcast.TorrentBroadcast.readBroadcastBlock(TorrentBroadcast.scala:165)
	at org.apache.spark.broadcast.TorrentBroadcast._value$lzycompute(TorrentBroadcast.scala:64)
	at org.apache.spark.broadcast.TorrentBroadcast._value(TorrentBroadcast.scala:64)
	at org.apache.spark.broadcast.TorrentBroadcast.getValue(TorrentBroadcast.scala:88)
	at org.apache.spark.broadcast.Broadcast.value(Broadcast.scala:70)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
	at org.apache.spark.scheduler.Task.run(Task.scala:89)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.spark.SparkException: Failed to get broadcast_1_piece0 of broadcast_1
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$org$apache$spark$broadcast$TorrentBroadcast$$readBlocks$1$$anonfun$2.apply(TorrentBroadcast.scala:138)
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$org$apache$spark$broadcast$TorrentBroadcast$$readBlocks$1$$anonfun$2.apply(TorrentBroadcast.scala:138)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$org$apache$spark$broadcast$TorrentBroadcast$$readBlocks$1.apply$mcVI$sp(TorrentBroadcast.scala:137)
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$org$apache$spark$broadcast$TorrentBroadcast$$readBlocks$1.apply(TorrentBroadcast.scala:120)
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$org$apache$spark$broadcast$TorrentBroadcast$$readBlocks$1.apply(TorrentBroadcast.scala:120)
	at scala.collection.immutable.List.foreach(List.scala:318)
	at org.apache.spark.broadcast.TorrentBroadcast.org$apache$spark$broadcast$TorrentBroadcast$$readBlocks(TorrentBroadcast.scala:120)
	at org.apache.spark.broadcast.TorrentBroadcast$$anonfun$readBroadcastBlock$1.apply(TorrentBroadcast.scala:175)
	at org.apache.spark.util.Utils$.tryOrIOException(Utils.scala:1219)
	... 11 more
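
One way to avoid the error (a minimal sketch, assuming the standard Spark shell where {{sc}} is already a live SparkContext; this is not taken from the report above) is to reuse or stop the existing context instead of constructing a second one in the same JVM:

{code:scala}
// Sketch only: assumes the Spark shell, where `sc` is already a live SparkContext.
import org.apache.spark.{SparkConf, SparkContext}

// Option 1: reuse the context the shell already provides.
sc.range(1, 10).reduce(_ + _)

// Option 2: if a fresh context is really needed, stop the shell's context first,
// then create the new one. (SparkContext.getOrCreate is another way to share a
// single context rather than calling the constructor directly.)
sc.stop()
val sc2 = new SparkContext(new SparkConf().setMaster("local").setAppName("app"))
sc2.range(1, 10).reduce(_ + _)
{code}

Stopping the first context before creating a second one avoids the stale driver-side broadcast/BlockManager state that appears to produce the "Failed to get broadcast_1_piece0" error shown above.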


> Creating two SparkContext objects in the same JVM makes the first one unusable
> -------------------------------------------------------------------------------
>
>                 Key: SPARK-14505
>                 URL: https://issues.apache.org/jira/browse/SPARK-14505
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 1.6.1
>            Reporter: The sea
>            Priority: Critical
>             Fix For: 1.6.1
>


