diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
index 43c79ce8f2b44b84056c8bfbae1d7fefa2379181..6726d7d6afe99e02ad42db9e66df68b88d6262c5 100644
--- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
@@ -21,9 +21,9 @@ import java.io.OutputStream
 import java.nio.ByteBuffer
 import java.util.LinkedHashMap
 
-import scala.collection.SortedMap
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.SortedSet
 import scala.reflect.ClassTag
 
 import com.google.common.io.ByteStreams
@@ -95,6 +95,8 @@ private[spark] class MemoryStore(
     def compare(other: OurBlockIdAndSizeType): Int = this.size.compare(other.size)
   }
 
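+  // Block ids and their sizes, kept sorted by size via OurBlockIdAndSizeType.compare.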
+  private val blockIdAndSizeSet = SortedSet.empty[OurBlockIdAndSizeType]
   private val entries = new LinkedHashMap[BlockId, MemoryEntry[_]](32, 0.75f, true)
 
   // A mapping from taskAttemptId to amount of memory used for unrolling a block (in bytes)
@@ -134,7 +135,7 @@ private[spark] class MemoryStore(
 
   def getSize(blockId: BlockId): Long = {
     entries.synchronized {
-      entries.get(blockId).size
+      entries.get(blockId).size
     }
   }