diff --git a/src/scala/spark/DfsShuffle.scala b/src/scala/spark/DfsShuffle.scala
index 10f77a824a334ba49b747e053d778cebd98cfece..256bf4ea9c7d74eae5e4fc3e8bc6c073e912bb40 100644
--- a/src/scala/spark/DfsShuffle.scala
+++ b/src/scala/spark/DfsShuffle.scala
@@ -61,7 +61,8 @@ extends Logging
     })
 
     // Return an RDD that does each of the merges for a given partition
-    return sc.parallelize(0 until numOutputSplits).flatMap((myIndex: Int) => {
+    val indexes = sc.parallelize(0 until numOutputSplits, numOutputSplits)
+    return indexes.flatMap((myIndex: Int) => {
       val combiners = new HashMap[K, C]
       val fs = DfsShuffle.getFileSystem()
       for (i <- Utils.shuffle(0 until numInputSplits)) {
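
Note (not part of the patch): the change passes numOutputSplits as the second argument to sc.parallelize, so the merge stage gets exactly one partition (and hence one task) per output split instead of whatever the default level of parallelism happens to be. A minimal standalone sketch of what that second argument controls, under the assumption of the early `spark` package layout used in this repo and an illustrative "local" master string (the object name and split count are hypothetical):

    import spark.SparkContext

    object ParallelizeSplitsSketch {
      def main(args: Array[String]) {
        val numOutputSplits = 4  // hypothetical split count for illustration
        val sc = new SparkContext("local", "ParallelizeSplitsSketch")

        // One-argument form: split count falls back to the default parallelism.
        val defaultIndexes = sc.parallelize(0 until numOutputSplits)
        // Two-argument form, as in the patch: exactly one split per output partition.
        val pinnedIndexes = sc.parallelize(0 until numOutputSplits, numOutputSplits)

        println("default splits: " + defaultIndexes.splits.size)
        println("pinned splits:  " + pinnedIndexes.splits.size)
      }
    }

Since the flatMap in DfsShuffle runs once per partition of the parallelized index range, pinning the split count ties the number of merge tasks directly to numOutputSplits.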