From 9a815de4bfaf959fe1399d6550e561a0ec9f28c2 Mon Sep 17 00:00:00 2001
From: BlackNiuza <zeming89@gmail.com>
Date: Thu, 1 Aug 2013 00:36:47 +0800
Subject: [PATCH] write and read generation in ResultTask

---
 core/src/main/scala/spark/scheduler/ResultTask.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/spark/scheduler/ResultTask.scala b/core/src/main/scala/spark/scheduler/ResultTask.scala
index 361b1e6b91..1ced6f9524 100644
--- a/core/src/main/scala/spark/scheduler/ResultTask.scala
+++ b/core/src/main/scala/spark/scheduler/ResultTask.scala
@@ -118,6 +118,7 @@ private[spark] class ResultTask[T, U](
       out.write(bytes)
       out.writeInt(partition)
       out.writeInt(outputId)
+      out.writeLong(generation)
       out.writeObject(split)
     }
   }
@@ -132,6 +133,7 @@ private[spark] class ResultTask[T, U](
     func = func_.asInstanceOf[(TaskContext, Iterator[T]) => U]
     partition = in.readInt()
     val outputId = in.readInt()
+    generation = in.readLong()
    split = in.readObject().asInstanceOf[Partition]
   }
 }
--
GitLab
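
The patch adds the `generation` field to both sides of ResultTask's custom serialization. Below is a minimal, standalone sketch (not the actual Spark class; `MiniTask` and its fields are illustrative only) of the constraint the change relies on: with `java.io.Externalizable`, fields come back in exactly the order they were written, so a `writeLong(generation)` in `writeExternal` must be paired with a `readLong()` at the matching position in `readExternal`, or every field after it would be misread.

```scala
import java.io._

// Hypothetical stand-in for a task that serializes a generation number.
class MiniTask(var partition: Int, var outputId: Int, var generation: Long)
  extends Externalizable {

  // Externalizable requires a public no-arg constructor for deserialization.
  def this() = this(0, 0, 0L)

  override def writeExternal(out: ObjectOutput): Unit = {
    out.writeInt(partition)
    out.writeInt(outputId)
    out.writeLong(generation)   // new field, written after outputId
  }

  override def readExternal(in: ObjectInput): Unit = {
    partition = in.readInt()
    outputId = in.readInt()
    generation = in.readLong()  // must be read back at the same position
  }
}

// Round-trip check: serialize and deserialize one task.
object MiniTaskRoundTrip {
  def main(args: Array[String]): Unit = {
    val buf = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(buf)
    oos.writeObject(new MiniTask(3, 7, 42L))
    oos.close()

    val ois = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
    val copy = ois.readObject().asInstanceOf[MiniTask]
    println(s"partition=${copy.partition} outputId=${copy.outputId} generation=${copy.generation}")
  }
}
```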