Commit 3a98611d authored by Matei Zaharia

Merge pull request #377 from stephenh/executeOnCompleteCallbackInFinally

Call executeOnCompleteCallbacks in a finally block.
Parents: cb867e9f, d228bff4
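The change is small but behavioral: previously executeOnCompleteCallbacks() was only reached on the success path, so a task whose body threw an exception skipped its on-complete callbacks. Wrapping the body in try/finally guarantees the callbacks run either way. Below is a minimal, self-contained sketch of that pattern; SimpleTaskContext, runTask, and FinallyDemo are illustrative names for this sketch, not Spark's actual classes.

// Minimal sketch of the try/finally pattern this commit adopts (not Spark's real classes).
import scala.collection.mutable.ArrayBuffer
import scala.util.Try

class SimpleTaskContext {  // stand-in for spark.TaskContext
  private val callbacks = new ArrayBuffer[() => Unit]
  def addOnCompleteCallback(f: () => Unit): Unit = { callbacks += f }
  def executeOnCompleteCallbacks(): Unit = callbacks.foreach(_())
}

object FinallyDemo extends App {
  // Runs a task body and always fires the on-complete callbacks, even on failure.
  def runTask[T](context: SimpleTaskContext)(body: => T): T = {
    try {
      body
    } finally {
      context.executeOnCompleteCallbacks()
    }
  }

  val context = new SimpleTaskContext
  context.addOnCompleteCallback(() => println("cleanup ran"))
  // The callback still fires even though the task body throws:
  println(Try(runTask(context) { sys.error("task failed") }))  // prints Failure(...)
}

The test added in this commit exercises exactly this case: a compute that fails still triggers the registered callback.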
ResultTask.scala
@@ -15,9 +15,11 @@ private[spark] class ResultTask[T, U](
   override def run(attemptId: Long): U = {
     val context = new TaskContext(stageId, partition, attemptId)
-    val result = func(context, rdd.iterator(split, context))
-    context.executeOnCompleteCallbacks()
-    result
+    try {
+      func(context, rdd.iterator(split, context))
+    } finally {
+      context.executeOnCompleteCallbacks()
+    }
   }
   override def preferredLocations: Seq[String] = locs
TaskContextSuite.scala (new file)

package spark.scheduler

import org.scalatest.FunSuite
import org.scalatest.BeforeAndAfter

import spark.TaskContext
import spark.RDD
import spark.SparkContext
import spark.Split

class TaskContextSuite extends FunSuite with BeforeAndAfter {

  var sc: SparkContext = _

  after {
    if (sc != null) {
      sc.stop()
      sc = null
    }
    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
    System.clearProperty("spark.master.port")
  }

  test("Calls executeOnCompleteCallbacks after failure") {
    var completed = false
    sc = new SparkContext("local", "test")
    val rdd = new RDD[String](sc) {
      override val splits = Array[Split](StubSplit(0))
      override val dependencies = List()
      override def compute(split: Split, context: TaskContext) = {
        context.addOnCompleteCallback(() => completed = true)
        sys.error("failed")
      }
    }
    val func = (c: TaskContext, i: Iterator[String]) => i.next
    val task = new ResultTask[String, String](0, rdd, func, 0, Seq(), 0)
    intercept[RuntimeException] {
      task.run(0)
    }
    assert(completed === true)
  }

  case class StubSplit(val index: Int) extends Split
}
\ No newline at end of file