You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2013/12/21 04:16:06 UTC
[2/3] git commit: Fixed test failure by adding exception to abortion msg
Fixed test failure by adding exception to abortion msg
Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/d7bf08cb
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/d7bf08cb
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/d7bf08cb
Branch: refs/heads/branch-0.8
Commit: d7bf08cba3dc34180ef6b744560f99c8aefc96bf
Parents: 6183102
Author: Kay Ousterhout <ka...@gmail.com>
Authored: Fri Dec 20 10:19:03 2013 -0800
Committer: Kay Ousterhout <ka...@gmail.com>
Committed: Fri Dec 20 10:19:03 2013 -0800
----------------------------------------------------------------------
.../apache/spark/scheduler/local/LocalTaskSetManager.scala | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/d7bf08cb/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
index a498599..f92ad4a 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalTaskSetManager.scala
@@ -176,15 +176,18 @@ private[spark] class LocalTaskSetManager(sched: LocalScheduler, val taskSet: Tas
val task = taskSet.tasks(index)
info.markFailed()
decreaseRunningTasks(1)
+ var failureReason = "unknown"
ser.deserialize[TaskEndReason](serializedData, getClass.getClassLoader) match {
case ef: ExceptionFailure =>
+ failureReason = "Exception failure: %s".format(ef.description)
val locs = ef.stackTrace.map(loc => "\tat %s".format(loc.toString))
logInfo("Task loss due to %s\n%s\n%s".format(
ef.className, ef.description, locs.mkString("\n")))
sched.dagScheduler.taskEnded(task, ef, null, null, info, ef.metrics.getOrElse(null))
case TaskResultLost =>
- logWarning("Lost result for TID %s".format(tid))
+ failureReason = "Lost result for TID %s".format(tid)
+ logWarning(failureReason)
sched.dagScheduler.taskEnded(task, TaskResultLost, null, null, info, null)
case _ => {}
@@ -193,8 +196,8 @@ private[spark] class LocalTaskSetManager(sched: LocalScheduler, val taskSet: Tas
copiesRunning(index) -= 1
numFailures(index) += 1
if (numFailures(index) > MAX_TASK_FAILURES) {
- val errorMessage = "Task %s:%d failed more than %d times; aborting job".format(
- taskSet.id, index, MAX_TASK_FAILURES)
+ val errorMessage = ("Task %s:%d failed more than %d times; aborting job" +
+ "(most recent failure: %s").format(taskSet.id, index, MAX_TASK_FAILURES, failureReason)
decreaseRunningTasks(runningTasks)
sched.dagScheduler.taskSetFailed(taskSet, errorMessage)
// need to delete failed Taskset from schedule queue