From 018b06bb69d41845c7d9e1d3e952f7a5f5b08a74 Mon Sep 17 00:00:00 2001
From: Michael Armbrust
Date: Mon, 14 Apr 2014 19:57:55 -0700
Subject: [PATCH] Include stack trace for exceptions in user code.

---
 .../scala/org/apache/spark/scheduler/TaskSetManager.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index 86d2050a03f18..6b83610fb3650 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -538,8 +538,8 @@ private[spark] class TaskSetManager(
           return
         }
         val key = ef.description
-        failureReason = "Exception failure in TID %s on host %s: %s".format(
-          tid, info.host, ef.description)
+        failureReason = "Exception failure in TID %s on host %s: %s\n%s".format(
+          tid, info.host, ef.description, ef.stackTrace.mkString("\n"))
         val now = clock.getTime()
         val (printFull, dupCount) = {
           if (recentExceptions.contains(key)) {
@@ -582,7 +582,7 @@ private[spark] class TaskSetManager(
      if (numFailures(index) >= maxTaskFailures) {
        logError("Task %s:%d failed %d times; aborting job".format(
          taskSet.id, index, maxTaskFailures))
-        abort("Task %s:%d failed %d times (most recent failure: %s)".format(
+        abort("Task %s:%d failed %d times, most recent failure: %s".format(
          taskSet.id, index, maxTaskFailures, failureReason))
        return
      }
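
Note (not part of the patch): a minimal, standalone Scala sketch of the failure
message the patched line produces. The tid and host values are hypothetical
stand-ins, and a plain caught exception substitutes for Spark's ExceptionFailure
(whose stackTrace field is likewise an Array[StackTraceElement]); only the format
string mirrors the patched code.

    object StackTraceFormatDemo {
      def main(args: Array[String]): Unit = {
        // Simulate an exception thrown by user code inside a task.
        val exception =
          try {
            throw new IllegalStateException("user code failed")
          } catch {
            case e: Exception => e
          }

        // Hypothetical stand-ins for the tid and info.host values that
        // TaskSetManager has in scope when it builds failureReason.
        val tid = 42L
        val host = "worker-1"

        // Same shape as the patched failureReason: the description followed by
        // the stack trace elements joined with newlines, so the log shows a
        // full multi-line trace instead of only the exception message.
        val failureReason = "Exception failure in TID %s on host %s: %s\n%s".format(
          tid, host, exception.getMessage, exception.getStackTrace.mkString("\n"))

        println(failureReason)
      }
    }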