Posted to issues@spark.apache.org by "Nabarun (JIRA)" <ji...@apache.org> on 2016/08/07 06:10:20 UTC

[jira] [Commented] (SPARK-8904) When using LDA DAGScheduler throws exception

    [ https://issues.apache.org/jira/browse/SPARK-8904?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15410835#comment-15410835 ] 

Nabarun commented on SPARK-8904:
--------------------------------

This seems to be related to something I am seeing on my end too. I converted my countVectors into a DataFrame:

val ldaDF = countVectors.map { case Row(id: Long, countVector: Vector) => (id, countVector) } 

When I try to display it, the following exception is thrown (a possible cause and workaround are sketched after the trace):

org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 3148.0 failed 4 times, most recent failure: Lost task 0.3 in stage 3148.0 (TID 11632, 10.209.235.85): scala.MatchError: [0,(1671,[1,2,3,5,8,10,11,12,14,15,17,18,20,21,23,27,28,29,30,31,32,36,37,38,39,41,42,43,45,46,51,52,54,66,69,71,74,75,78,80,82,83,85,88,89,90,92,96,97,98,99,102,104,106,107,108,109,111,112,115,118,121,123,124,126,134,138,139,143,144,145,148,150,151,152,153,155,161,166,171,172,173,174,176,178,179,180,181,189,190,197,199,200,201,207,209,212,216,217,218,220,222,223,224,226,227,228,232,234,238,240,244,246,250,252,254,255,260,261,262,264,268,269,270,277,280,281,282,286,292,294,295,296,297,301,310,312,314,316,318,323,324,325,337,341,343,346,347,351,355,359,366,367,379,380,381,388,390,391,398,403,405,411,417,442,444,448,456,460,464,466,468,470,477,480,484,487,490,491,495,496,501,502,507,509,512,522,523,527,529,531,533,534,535,552,554,556,557,565,566,567,569,574,575,585,624,630,632,633,638,644,646,652,653,658,668,669,670,680,683,686,690,693,696,698,704,705,712,723,726,736,746,747,750,757,758,761,765,773,774,775,783,786,796,797,801,807,811,815,825,830,833,843,844,845,847,849,859,861,862,864,867,871,872,876,879,882,892,895,896,897,912,923,924,935,937,941,944,945,948,949,952,968,982,989,1000,1003,1015,1018,1021,1025,1029,1034,1036,1038,1041,1048,1072,1082,1086,1092,1106,1111,1114,1117,1123,1128,1133,1135,1145,1149,1154,1168,1169,1171,1178,1180,1181,1183,1184,1201,1224,1234,1240,1250,1260,1261,1267,1269,1270,1280,1305,1309,1317,1333,1354,1355,1358,1378,1379,1386,1389,1393,1411,1413,1426,1428,1475,1480,1504,1506,1521,1525,1530,1532,1545,1555,1601,1614,1635,1643,1649,1653,1668],[1.0,5.0,4.0,3.0,2.0,14.0,30.0,2.0,72.0,9.0,6.0,6.0,1.0,13.0,1.0,4.0,1.0,3.0,2.0,10.0,2.0,4.0,74.0,3.0,11.0,1.0,35.0,1.0,16.0,1.0,2.0,15.0,3.0,4.0,17.0,2.0,8.0,60.0,35.0,3.0,1.0,33.0,2.0,2.0,3.0,11.0,16.0,2.0,8.0,2.0,3.0,48.0,1.0,1.0,4.0,8.0,4.0,3.0,4.0,4.0,1.0,3.0,1.0,11.0,1.0,2.0,3.0,1.0,35.0,6.0,2.0,1.0,2.0,3.0,3.0,4.0,2.0,2.0,1.0,1.0,20.0,9.0,6.0,17.0,10.0,8.0,1.0,12.0,1.0,3.0,3.0,2.0,9.0,1.0,2.0,19.0,1.0,2.0,1.0,1.0,2.0,9.0,1.0,1.0,1.0,5.0,1.0,2.0,5.0,1.0,1.0,1.0,1.0,1.0,7.0,1.0,14.0,2.0,2.0,1.0,5.0,2.0,5.0,5.0,20.0,2.0,27.0,3.0,4.0,11.0,1.0,3.0,3.0,1.0,2.0,2.0,7.0,5.0,2.0,2.0,1.0,3.0,1.0,2.0,1.0,2.0,8.0,5.0,1.0,5.0,3.0,1.0,4.0,3.0,3.0,4.0,1.0,3.0,4.0,1.0,2.0,3.0,5.0,7.0,1.0,8.0,1.0,2.0,4.0,2.0,1.0,12.0,5.0,1.0,6.0,4.0,2.0,2.0,1.0,1.0,3.0,4.0,1.0,1.0,2.0,4.0,3.0,1.0,2.0,6.0,1.0,1.0,1.0,4.0,2.0,1.0,7.0,12.0,1.0,12.0,1.0,1.0,9.0,2.0,1.0,2.0,1.0,1.0,6.0,6.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,2.0,3.0,1.0,1.0,2.0,1.0,3.0,1.0,4.0,1.0,5.0,2.0,1.0,2.0,2.0,3.0,1.0,2.0,1.0,1.0,2.0,3.0,1.0,4.0,3.0,1.0,3.0,2.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,2.0,2.0,1.0,1.0,2.0,3.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,5.0,3.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,3.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0])] (of class org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema)
	at line1907dd16af5d4fbfa217a9d52f096b36316.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:142)
	at line1907dd16af5d4fbfa217a9d52f096b36316.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:142)
	at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
	at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
	at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$3.apply(SparkPlan.scala:231)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$3.apply(SparkPlan.scala:225)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:790)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:790)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
	at org.apache.spark.scheduler.Task.run(Task.scala:85)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1450)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1438)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1437)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1437)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:811)
	at scala.Option.foreach(Option.scala:236)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:811)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1659)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1618)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1607)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:632)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1871)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1884)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1897)
	at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:332)
	at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:39)
	at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
	at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532)
	at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182)
	at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189)
	at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1925)
	at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1924)
	at org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2562)
	at org.apache.spark.sql.Dataset.head(Dataset.scala:1924)
	at org.apache.spark.sql.Dataset.take(Dataset.scala:2139)
	at com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation0(OutputAggregator.scala:80)
	at com.databricks.backend.daemon.driver.OutputAggregator$.withOutputAggregation(OutputAggregator.scala:42)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal$$anonfun$repl$2.apply(ScalaDriverLocal.scala:193)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal$$anonfun$repl$2.apply(ScalaDriverLocal.scala:185)
	at scala.Option.map(Option.scala:145)
	at com.databricks.backend.daemon.driver.ScalaDriverLocal.repl(ScalaDriverLocal.scala:185)
	at com.databricks.backend.daemon.driver.DriverLocal$$anonfun$execute$3.apply(DriverLocal.scala:170)
	at com.databricks.backend.daemon.driver.DriverLocal$$anonfun$execute$3.apply(DriverLocal.scala:170)
	at com.databricks.logging.UsageLogging$$anonfun$withAttributionContext$1.apply(UsageLogging.scala:121)
	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
	at com.databricks.logging.UsageLogging$class.withAttributionContext(UsageLogging.scala:116)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionContext(DriverLocal.scala:32)
	at com.databricks.logging.UsageLogging$class.withAttributionTags(UsageLogging.scala:154)
	at com.databricks.backend.daemon.driver.DriverLocal.withAttributionTags(DriverLocal.scala:32)
	at com.databricks.backend.daemon.driver.DriverLocal.execute(DriverLocal.scala:169)
	at com.databricks.backend.daemon.driver.DriverWrapper$$anonfun$tryExecutingCommand$2.apply(DriverWrapper.scala:544)
	at com.databricks.backend.daemon.driver.DriverWrapper$$anonfun$tryExecutingCommand$2.apply(DriverWrapper.scala:544)
	at scala.util.Try$.apply(Try.scala:161)
	at com.databricks.backend.daemon.driver.DriverWrapper.tryExecutingCommand(DriverWrapper.scala:539)
	at com.databricks.backend.daemon.driver.DriverWrapper.executeCommand(DriverWrapper.scala:483)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInnerLoop(DriverWrapper.scala:391)
	at com.databricks.backend.daemon.driver.DriverWrapper.runInner(DriverWrapper.scala:348)
	at com.databricks.backend.daemon.driver.DriverWrapper.run(DriverWrapper.scala:214)
	at java.lang.Thread.run(Thread.java:745)
Caused by: scala.MatchError: [0,(1671,[1,2,3,5,8,10,11,12,14,15,17,18,20,21,23,27,28,29,30,31,32,36,37,38,39,41,42,43,45,46,51,52,54,66,69,71,74,75,78,80,82,83,85,88,89,90,92,96,97,98,99,102,104,106,107,108,109,111,112,115,118,121,123,124,126,134,138,139,143,144,145,148,150,151,152,153,155,161,166,171,172,173,174,176,178,179,180,181,189,190,197,199,200,201,207,209,212,216,217,218,220,222,223,224,226,227,228,232,234,238,240,244,246,250,252,254,255,260,261,262,264,268,269,270,277,280,281,282,286,292,294,295,296,297,301,310,312,314,316,318,323,324,325,337,341,343,346,347,351,355,359,366,367,379,380,381,388,390,391,398,403,405,411,417,442,444,448,456,460,464,466,468,470,477,480,484,487,490,491,495,496,501,502,507,509,512,522,523,527,529,531,533,534,535,552,554,556,557,565,566,567,569,574,575,585,624,630,632,633,638,644,646,652,653,658,668,669,670,680,683,686,690,693,696,698,704,705,712,723,726,736,746,747,750,757,758,761,765,773,774,775,783,786,796,797,801,807,811,815,825,830,833,843,844,845,847,849,859,861,862,864,867,871,872,876,879,882,892,895,896,897,912,923,924,935,937,941,944,945,948,949,952,968,982,989,1000,1003,1015,1018,1021,1025,1029,1034,1036,1038,1041,1048,1072,1082,1086,1092,1106,1111,1114,1117,1123,1128,1133,1135,1145,1149,1154,1168,1169,1171,1178,1180,1181,1183,1184,1201,1224,1234,1240,1250,1260,1261,1267,1269,1270,1280,1305,1309,1317,1333,1354,1355,1358,1378,1379,1386,1389,1393,1411,1413,1426,1428,1475,1480,1504,1506,1521,1525,1530,1532,1545,1555,1601,1614,1635,1643,1649,1653,1668],[1.0,5.0,4.0,3.0,2.0,14.0,30.0,2.0,72.0,9.0,6.0,6.0,1.0,13.0,1.0,4.0,1.0,3.0,2.0,10.0,2.0,4.0,74.0,3.0,11.0,1.0,35.0,1.0,16.0,1.0,2.0,15.0,3.0,4.0,17.0,2.0,8.0,60.0,35.0,3.0,1.0,33.0,2.0,2.0,3.0,11.0,16.0,2.0,8.0,2.0,3.0,48.0,1.0,1.0,4.0,8.0,4.0,3.0,4.0,4.0,1.0,3.0,1.0,11.0,1.0,2.0,3.0,1.0,35.0,6.0,2.0,1.0,2.0,3.0,3.0,4.0,2.0,2.0,1.0,1.0,20.0,9.0,6.0,17.0,10.0,8.0,1.0,12.0,1.0,3.0,3.0,2.0,9.0,1.0,2.0,19.0,1.0,2.0,1.0,1.0,2.0,9.0,1.0,1.0,1.0,5.0,1.0,2.0,5.0,1.0,1.0,1.0,1.0,1.0,7.0,1.0,14.0,2.0,2.0,1.0,5.0,2.0,5.0,5.0,20.0,2.0,27.0,3.0,4.0,11.0,1.0,3.0,3.0,1.0,2.0,2.0,7.0,5.0,2.0,2.0,1.0,3.0,1.0,2.0,1.0,2.0,8.0,5.0,1.0,5.0,3.0,1.0,4.0,3.0,3.0,4.0,1.0,3.0,4.0,1.0,2.0,3.0,5.0,7.0,1.0,8.0,1.0,2.0,4.0,2.0,1.0,12.0,5.0,1.0,6.0,4.0,2.0,2.0,1.0,1.0,3.0,4.0,1.0,1.0,2.0,4.0,3.0,1.0,2.0,6.0,1.0,1.0,1.0,4.0,2.0,1.0,7.0,12.0,1.0,12.0,1.0,1.0,9.0,2.0,1.0,2.0,1.0,1.0,6.0,6.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,2.0,3.0,1.0,1.0,2.0,1.0,3.0,1.0,4.0,1.0,5.0,2.0,1.0,2.0,2.0,3.0,1.0,2.0,1.0,1.0,2.0,3.0,1.0,4.0,3.0,1.0,3.0,2.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,2.0,2.0,1.0,1.0,2.0,3.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,5.0,3.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,4.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,3.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0])] (of class org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema)
	at line1907dd16af5d4fbfa217a9d52f096b36316.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:142)
	at line1907dd16af5d4fbfa217a9d52f096b36316.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:142)
	at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
	at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
	at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$3.apply(SparkPlan.scala:231)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$3.apply(SparkPlan.scala:225)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:790)
	at org.apache.spark.rdd.RDD$$anonfun$mapPartitionsInternal$1$$anonfun$apply$24.apply(RDD.scala:790)
	at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
	at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
	at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
	at org.apache.spark.scheduler.Task.run(Task.scala:85)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	... 1 more
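
For reference, a likely cause of this MatchError in Spark 2.x is a Vector type mismatch: ml.feature.CountVectorizer produces org.apache.spark.ml.linalg.Vector, so a pattern that matches on the old org.apache.spark.mllib.linalg.Vector never fires and the raw GenericRowWithSchema falls through. A minimal sketch of a workaround under that assumption (the MLVector alias and the ldaRDD name below are illustrative, not from the original report):

import org.apache.spark.ml.linalg.{Vector => MLVector}
import org.apache.spark.mllib.linalg.{Vectors => MLlibVectors}
import org.apache.spark.sql.Row

// Match on the ml.linalg.Vector actually stored in the DataFrame,
// then convert to the mllib type that mllib.clustering.LDA expects.
val ldaRDD = countVectors.rdd.map {
  case Row(id: Long, countVector: MLVector) =>
    (id, MLlibVectors.fromML(countVector))
}

Alternatively, org.apache.spark.ml.clustering.LDA accepts the DataFrame directly, which avoids the RDD conversion altogether.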

> When using LDA DAGScheduler throws exception
> --------------------------------------------
>
>                 Key: SPARK-8904
>                 URL: https://issues.apache.org/jira/browse/SPARK-8904
>             Project: Spark
>          Issue Type: Bug
>          Components: ML, MLlib
>    Affects Versions: 1.4.0
>         Environment: Amazon EC2 using ubuntu
>            Reporter: Ohad Zadok
>         Attachments: ldaexample.scala, screen1.png, screen2.png
>
>
> When using the LDA algorithm, the DAGScheduler throws an exception; this is the stack trace:
>         at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
>         at java.lang.Thread.run(Thread.java:745)
> Driver stacktrace:
>         at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1266)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1257)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1256)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
>         at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1256)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:730)
>         at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:730)
>         at scala.Option.foreach(Option.scala:236)
>         at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:730)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1450)
>         at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1411)
>         at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)


