diff --git a/core/src/main/scala/spark/ShuffleMapTask.scala b/core/src/main/scala/spark/ShuffleMapTask.scala
index b64401c2c77e2bf3c38ebd9efc066cadde1204fc..eb6a5e2df39f8b2a313d6e8ba1e397df8c83ed78 100644
--- a/core/src/main/scala/spark/ShuffleMapTask.scala
+++ b/core/src/main/scala/spark/ShuffleMapTask.scala
@@ -1,5 +1,6 @@
 package spark
 
+import java.io.BufferedOutputStream
 import java.io.FileOutputStream
 import java.io.ObjectOutputStream
 import scala.collection.mutable.HashMap
@@ -26,7 +27,7 @@ extends DAGTask[String](stageId) with Logging {
     val ser = SparkEnv.get.serializer.newInstance()
     for (i <- 0 until numOutputSplits) {
       val file = LocalFileShuffle.getOutputFile(dep.shuffleId, partition, i)
-      val out = ser.outputStream(new FileOutputStream(file))
+      val out = ser.outputStream(new BufferedOutputStream(new FileOutputStream(file)))
       buckets(i).foreach(pair => out.writeObject(pair))
       // TODO: have some kind of EOF marker
       out.close()
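
For context, a minimal standalone sketch of the pattern this change introduces: wrapping the raw FileOutputStream in a BufferedOutputStream so the many small writeObject calls are coalesced into larger writes instead of each one reaching the file system individually. This is not the Spark code itself; the file name, record type, and the use of plain java.io.ObjectOutputStream in place of Spark's serializer stream are illustrative assumptions.

// Minimal sketch of the buffered-write pattern (hypothetical file and records;
// plain ObjectOutputStream stands in for the Spark serializer's output stream).
import java.io.{BufferedOutputStream, File, FileOutputStream, ObjectOutputStream}

object BufferedShuffleWriteSketch {
  def main(args: Array[String]): Unit = {
    val file = File.createTempFile("shuffle-bucket", ".bin")
    // Without the BufferedOutputStream, every writeObject call would reach the
    // OS as a tiny write; the default 8 KB buffer coalesces them.
    val out = new ObjectOutputStream(new BufferedOutputStream(new FileOutputStream(file)))
    try {
      for (i <- 0 until 1000) out.writeObject((i, "value-" + i))
    } finally {
      out.close() // close() flushes the buffer before closing the underlying file
      file.delete()
    }
  }
}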