From 0092abb47a0f9fdc716d5dfc1c591ddb45de8c98 Mon Sep 17 00:00:00 2001
From: Sandy Ryza <sandy@cloudera.com>
Date: Tue, 5 May 2015 18:32:16 -0700
Subject: [PATCH] Some minor cleanup after SPARK-4550.

JoshRosen this PR addresses the comments you left on #4450 after it got
merged.

Author: Sandy Ryza <sandy@cloudera.com>

Closes #5916 from sryza/sandy-spark-4550-cleanup and squashes the following commits:

dee3d85 [Sandy Ryza] Some minor cleanup after SPARK-4550.
---
 .../org/apache/spark/storage/BlockObjectWriter.scala | 9 ++-------
 .../collection/PartitionedSerializedPairBuffer.scala | 4 ++--
 2 files changed, 4 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
index 499dd97c06..8bc4e205bc 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockObjectWriter.scala
@@ -59,7 +59,7 @@ private[spark] abstract class BlockObjectWriter(val blockId: BlockId) extends Ou
   def write(key: Any, value: Any)
 
   /**
-   * Notify the writer that a record worth of bytes has been written with writeBytes.
+   * Notify the writer that a record worth of bytes has been written with OutputStream#write.
    */
   def recordWritten()
 
@@ -215,12 +215,7 @@ private[spark] class DiskBlockObjectWriter(
     objOut.writeKey(key)
     objOut.writeValue(value)
 
-    numRecordsWritten += 1
-    writeMetrics.incShuffleRecordsWritten(1)
-
-    if (numRecordsWritten % 32 == 0) {
-      updateBytesWritten()
-    }
+    recordWritten()
   }
 
   override def write(b: Int): Unit = throw new UnsupportedOperationException()
diff --git a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
index b5ca0c62a0..ac9ea63936 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PartitionedSerializedPairBuffer.scala
@@ -71,10 +71,10 @@ private[spark] class PartitionedSerializedPairBuffer[K, V](
     if (keyStart < 0) {
       throw new Exception(s"Can't grow buffer beyond ${1 << 31} bytes")
     }
-    kvSerializationStream.writeObject[Any](key)
+    kvSerializationStream.writeKey[Any](key)
     kvSerializationStream.flush()
     val valueStart = kvBuffer.size
-    kvSerializationStream.writeObject[Any](value)
+    kvSerializationStream.writeValue[Any](value)
     kvSerializationStream.flush()
     val valueEnd = kvBuffer.size
 
--
GitLab
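
Note on the DiskBlockObjectWriter hunk: the inlined counter-and-metrics code is replaced by a call to recordWritten(), whose point is that bumping the per-record counter is cheap while refreshing the bytes-written metric is not, so the refresh is batched to every 32 records. The following is a minimal, self-contained Scala sketch of that batching pattern; WriteMetricsSketch, WriterSketch, and currentFilePosition are illustrative stand-ins, not Spark's actual classes.

// Illustrative stand-in for Spark's shuffle write metrics; not the real class.
class WriteMetricsSketch {
  var recordsWritten: Long = 0L
  var bytesWritten: Long = 0L
}

class WriterSketch(metrics: WriteMetricsSketch) {
  private var numRecordsWritten = 0

  // Stand-in for querying the underlying file channel's position,
  // which is the comparatively expensive operation being batched.
  private def currentFilePosition(): Long = numRecordsWritten * 8L

  // One method owns both the counter and the periodic metrics refresh,
  // mirroring the role recordWritten() plays in the patch.
  def recordWritten(): Unit = {
    numRecordsWritten += 1
    metrics.recordsWritten += 1
    if (numRecordsWritten % 32 == 0) {
      metrics.bytesWritten = currentFilePosition()
    }
  }
}

object WriterSketchDemo extends App {
  val metrics = new WriteMetricsSketch
  val writer = new WriterSketch(metrics)
  (1 to 100).foreach(_ => writer.recordWritten())
  // Prints records=100; bytes reflects the last refresh at record 96.
  println(s"records=${metrics.recordsWritten}, bytes=${metrics.bytesWritten}")
}

Centralizing this in one method is what lets the patch delete the duplicated copy: both the write(key, value) path and any other record-producing path go through the same counter and the same batching policy.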
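Note on the PartitionedSerializedPairBuffer hunk: switching from writeObject to writeKey/writeValue is more than renaming, since those are dedicated hooks a serialization stream may override, and routing records through the generic writeObject would silently bypass any key- or value-specific handling. A rough sketch of the mechanism, assuming (as in Spark's SerializationStream) that the hooks default to writeObject; StreamSketch and PrintingStream are hypothetical:

// Hypothetical stream with key/value hooks; not Spark's SerializationStream.
trait StreamSketch {
  def writeObject[T](t: T): this.type
  // The hooks default to the generic path but exist so subclasses can
  // specialize key or value handling.
  def writeKey[T](key: T): this.type = writeObject(key)
  def writeValue[T](value: T): this.type = writeObject(value)
}

class PrintingStream extends StreamSketch {
  def writeObject[T](t: T): this.type = { println(s"generic: $t"); this }
  // A subclass that treats keys specially; calls routed through
  // writeObject would never reach this override.
  override def writeKey[T](key: T): this.type = { println(s"key: $key"); this }
}

object StreamSketchDemo extends App {
  val out = new PrintingStream
  out.writeKey("k").writeValue(1) // prints "key: k" then "generic: 1"
}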