From f95e5c0da3f53596da072f2265b9439227c07f05 Mon Sep 17 00:00:00 2001
From: Justin Loew <jloloew@gmail.com>
Date: Sun, 15 Apr 2018 21:16:36 -0500
Subject: [PATCH] MemoryStore: select eviction candidates from blockIdAndSizeSet

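Replace the straight scan of the `entries` map with iteration over
blockIdAndSizeSet when choosing blocks to evict, so candidates are
visited in whatever order that set maintains (presumably by block size)
rather than the map's insertion order. The set itself is maintained
outside this hunk and is not shown in the patch; the sketch below is
only one assumed shape for it. The IdAndSize name, the comparator, and
the ascending (smallest-first) direction are illustrative assumptions,
not part of this change:

    // Hypothetical sketch, not part of this diff: a sorted companion view
    // of the blocks held in `entries`, ordered by size so the eviction loop
    // can walk candidates in size order. Access it only inside
    // entries.synchronized, the same way `entries` is protected.
    private case class IdAndSize(blockId: BlockId, size: Long)

    private val blockIdAndSizeSet = new java.util.TreeSet[IdAndSize](
      new java.util.Comparator[IdAndSize] {
        override def compare(a: IdAndSize, b: IdAndSize): Int = {
          val bySize = java.lang.Long.compare(a.size, b.size)
          // Break ties on the block name so distinct blocks of equal size
          // do not collapse into a single set element.
          if (bySize != 0) bySize else a.blockId.name.compareTo(b.blockId.name)
        }
      })

Whatever its real definition, the set has to be updated wherever
`entries` gains, loses, or resizes a block, so that the two structures
stay consistent; otherwise entries.get(blockId) in the loop below can
return null for a stale entry.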
---
 .../spark/storage/memory/MemoryStore.scala     | 18 +++++-------------
 1 file changed, 5 insertions(+), 13 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
index be37cad4a0..d80fec75bc 100644
--- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
@@ -459,26 +459,18 @@ private[spark] class MemoryStore(
       // (because of getValue or getBytes) while traversing the iterator, as that
       // can lead to exceptions.
       entries.synchronized {
-        // TODO: add our logic here
-
-
-
-
-
-
-
-        val iterator = entries.entrySet().iterator()
+        val iterator = blockIdAndSizeSet.iterator()
         while (freedMemory < space && iterator.hasNext) {
-          val pair = iterator.next()
-          val blockId = pair.getKey
-          val entry = pair.getValue
+          val idAndSize = iterator.next()
+          val blockId = idAndSize.blockId
+          val entry = entries.get(blockId)
           if (blockIsEvictable(blockId, entry)) {
             // We don't want to evict blocks which are currently being read, so we need to obtain
             // an exclusive write lock on blocks which are candidates for eviction. We perform a
             // non-blocking "tryLock" here in order to ignore blocks which are locked for reading:
             if (blockInfoManager.lockForWriting(blockId, blocking = false).isDefined) {
               selectedBlocks += blockId
-              freedMemory += pair.getValue.size
+              freedMemory += idAndSize.size
             }
           }
         }
-- 
GitLab