diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index 5f99591fd5458ba82a6a28965dcb6240268d4c0d..dea52eb5c6de8c158134ea1882fa83587f1a0ab3 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -235,8 +235,7 @@ abstract class RDD[T: ClassManifest](
   /**
    * Return a new RDD that is reduced into `numSplits` partitions.
    */
-  def coalesce(numSplits: Int = sc.defaultParallelism): RDD[T] =
-    new CoalescedRDD(this, numSplits)
+  def coalesce(numSplits: Int): RDD[T] = new CoalescedRDD(this, numSplits)
 
   /**
    * Return a sampled subset of this RDD.
diff --git a/core/src/main/scala/spark/api/java/JavaRDDLike.scala b/core/src/main/scala/spark/api/java/JavaRDDLike.scala
index 295eaa57c0c2e609a542220a8d0d168477864e23..d3a4b62553c7f9611b594c4ee880407741347900 100644
--- a/core/src/main/scala/spark/api/java/JavaRDDLike.scala
+++ b/core/src/main/scala/spark/api/java/JavaRDDLike.scala
@@ -130,11 +130,6 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends PairFlatMapWorkaround
     JavaPairRDD.fromRDD(rdd.cartesian(other.rdd)(other.classManifest))(classManifest, other.classManifest)
 
-  /**
-   * Return a new RDD that is reduced into the default number of partitions.
-   */
-  def coalesce(): RDD[T] = coalesce(rdd.context.defaultParallelism)
-
   /**
    * Return a new RDD that is reduced into `numSplits` partitions.
    */
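
Not part of the patch: a minimal usage sketch of the affected call site, assuming a local SparkContext master, application name, and input path chosen purely for illustration. It shows that callers which previously relied on coalesce() falling back to the default parallelism must now pass an explicit partition count.

    import spark.SparkContext

    object CoalesceExample {
      def main(args: Array[String]) {
        // Local master and input path are placeholders for illustration only.
        val sc = new SparkContext("local[4]", "CoalesceExample")
        val lines = sc.textFile("input.txt")

        // Before this change, `lines.coalesce()` implicitly used
        // sc.defaultParallelism; the target number of partitions
        // must now be named explicitly.
        val fewer = lines.coalesce(2)
        println(fewer.splits.size)

        sc.stop()
      }
    }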