diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
index 4cc5bcb7f9bafa600a8fa42b87c723dffbddfa09..00475288564f1a8e023d678adc2d2248d6126494 100644
--- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
@@ -23,6 +23,7 @@ import java.util.LinkedHashMap
 
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.SortedMap
 import scala.reflect.ClassTag
 
 import com.google.common.io.ByteStreams
@@ -88,6 +89,12 @@ private[spark] class MemoryStore(
   // Note: all changes to memory allocations, notably putting blocks, evicting blocks, and
   // acquiring or releasing unroll memory, must be synchronized on `memoryManager`!
 
+  // Pairs a block id with its size; instances sort by size (for eviction bookkeeping).
+  private class OurBlockIdAndSizeType(
+      val blockId: BlockId, // `val` so fields are readable on other instances (plain params are private[this])
+      val size: Long) extends Ordered[OurBlockIdAndSizeType] { // Ordered, not Ordering: single-arg compare(other)
+    def compare(other: OurBlockIdAndSizeType): Int = this.size compare other.size
+  }
+
   private val entries = new LinkedHashMap[BlockId, MemoryEntry[_]](32, 0.75f, true)
 
   // A mapping from taskAttemptId to amount of memory used for unrolling a block (in bytes)