From 49fd053dc78cbeede3e9ca60397b8c5fe1e35cc2 Mon Sep 17 00:00:00 2001
From: Justin Loew <jloloew@gmail.com>
Date: Sun, 15 Apr 2018 19:10:59 -0500
Subject: [PATCH] Test adding our intermediate block identifier data type

---
 .../org/apache/spark/storage/memory/MemoryStore.scala | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
index 4cc5bcb7f9..0047528856 100644
--- a/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
+++ b/core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala
@@ -23,6 +23,7 @@ import java.util.LinkedHashMap
 
 import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.SortedMap
 import scala.reflect.ClassTag
 
 import com.google.common.io.ByteStreams
@@ -88,6 +89,16 @@
   // Note: all changes to memory allocations, notably putting blocks, evicting blocks, and
   // acquiring or releasing unroll memory, must be synchronized on `memoryManager`!
 
+  /**
+   * Pairs a block id with that block's size in bytes. Instances are ordered by
+   * size (ascending) via [[scala.math.Ordered]] so they can be kept in a
+   * size-sorted collection.
+   */
+  private class OurBlockIdAndSizeType(
+      val blockId: BlockId,
+      val size: Long) extends Ordered[OurBlockIdAndSizeType] {
+    override def compare(that: OurBlockIdAndSizeType): Int = size compare that.size
+  }
+
   private val entries = new LinkedHashMap[BlockId, MemoryEntry[_]](32, 0.75f, true)
 
   // A mapping from taskAttemptId to amount of memory used for unrolling a block (in bytes)
-- 
GitLab