diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index 7fe6633f1ba7a632d3f0b68b4ca47b7a30a1a365..fa53d9be2c045de5bd0ba15a0597fdeb75761b74 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -48,7 +48,7 @@ abstract class RDD[T: ClassManifest](@transient sc: SparkContext) extends Serial
   // Methods that must be implemented by subclasses
   def splits: Array[Split]
   def compute(split: Split): Iterator[T]
-  val dependencies: List[Dependency[_]]
+  @transient val dependencies: List[Dependency[_]]
 
   // Optionally overridden by subclasses to specify how they are partitioned
   val partitioner: Option[Partitioner] = None
diff --git a/core/src/main/scala/spark/UnionRDD.scala b/core/src/main/scala/spark/UnionRDD.scala
index 6fded339ee885ba7c372690d4e52963e8fbf4bbb..4c0f255e6bb767e61ed3864f3e3600f237692247 100644
--- a/core/src/main/scala/spark/UnionRDD.scala
+++ b/core/src/main/scala/spark/UnionRDD.scala
@@ -16,7 +16,7 @@ class UnionSplit[T: ClassManifest](
 
 class UnionRDD[T: ClassManifest](
     sc: SparkContext,
-    rdds: Seq[RDD[T]])
+    @transient rdds: Seq[RDD[T]])
   extends RDD[T](sc)
   with Serializable {
 
@@ -33,7 +33,7 @@ class UnionRDD[T: ClassManifest](
 
   override def splits = splits_
 
-  override val dependencies = {
+  @transient override val dependencies = {
     val deps = new ArrayBuffer[Dependency[_]]
     var pos = 0
     for ((rdd, index) <- rdds.zipWithIndex) {
@@ -47,4 +47,4 @@ class UnionRDD[T: ClassManifest](
 
   override def preferredLocations(s: Split): Seq[String] =
     s.asInstanceOf[UnionSplit[T]].preferredLocations()
-}
\ No newline at end of file
+}
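
For context, the effect of the `@transient` markers added in this diff can be seen in a minimal, self-contained sketch under plain Java serialization. The `Dep`, `MyRdd`, and `roundTrip` names below are illustrative stand-ins, not Spark's own classes: a field annotated `@transient` is simply skipped when the object is written out, so a lineage-like structure held in such a field never travels with the serialized object.

```scala
import java.io._

// Hypothetical stand-ins (not Spark's classes) showing what @transient does
// under Java serialization: the annotated field is skipped on write and
// comes back as null on the deserialized copy.
class Dep(val name: String) extends Serializable

class MyRdd(@transient val dependencies: List[Dep], val id: Int) extends Serializable

object TransientDemo {
  // Serialize an object to bytes and read it back, as task shipping would.
  def roundTrip[T <: Serializable](obj: T): T = {
    val bytes = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bytes)
    out.writeObject(obj)
    out.close()
    val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
    in.readObject().asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    val rdd = new MyRdd(List(new Dep("shuffle")), 42)
    val copy = roundTrip(rdd)
    println(copy.id)            // 42   -- ordinary field survives serialization
    println(copy.dependencies)  // null -- @transient field was not serialized
  }
}
```

In the patch this is what keeps an RDD's dependency chain (and, for UnionRDD, the parent RDD sequence) from being dragged into every serialized closure that happens to capture the RDD.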