From 2c2bfbe294c0082520c80a01562a2dbeeba63b7a Mon Sep 17 00:00:00 2001
From: Evan Chan <ev@ooyala.com>
Date: Tue, 23 Jul 2013 01:36:44 -0700
Subject: [PATCH] Add toMap method to TimeStampedHashMap and use it

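The previous getCachedRDDs implementation relied on an asInstanceOf
cast, which only changes the static type and does not create an
immutable copy of the underlying mutable TimeStampedHashMap. This patch
adds a toMap method that builds an immutable snapshot from the map's
iterator and uses it in SparkContext.getCachedRDDs.

A minimal usage sketch (assuming `sc` is an existing SparkContext):

    val cached: Map[Int, RDD[_]] = sc.getCachedRDDs
    // `cached` is an immutable snapshot; RDDs persisted after this
    // call are not reflected in it.
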
---
 core/src/main/scala/spark/SparkContext.scala            | 2 +-
 core/src/main/scala/spark/util/TimeStampedHashMap.scala | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/spark/SparkContext.scala b/core/src/main/scala/spark/SparkContext.scala
index 1b46665d2c..0fb7dfa810 100644
--- a/core/src/main/scala/spark/SparkContext.scala
+++ b/core/src/main/scala/spark/SparkContext.scala
@@ -550,7 +550,7 @@ class SparkContext(
    * Returns an immutable map of RDDs that have marked themselves as cached via cache() call.
    * Note that this does not necessarily mean the caching or computation was successful.
    */
-  def getCachedRDDs: Map[Int, RDD[_]] = persistentRdds.asInstanceOf[Map[Int, RDD[_]]]
+  def getCachedRDDs: Map[Int, RDD[_]] = persistentRdds.toMap
 
   def getStageInfo: Map[Stage,StageInfo] = {
     dagScheduler.stageToInfos
diff --git a/core/src/main/scala/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/spark/util/TimeStampedHashMap.scala
index cc7909194a..07772a0afb 100644
--- a/core/src/main/scala/spark/util/TimeStampedHashMap.scala
+++ b/core/src/main/scala/spark/util/TimeStampedHashMap.scala
@@ -20,6 +20,7 @@ package spark.util
 import java.util.concurrent.ConcurrentHashMap
 import scala.collection.JavaConversions
 import scala.collection.mutable.Map
+import scala.collection.immutable
 import spark.scheduler.MapStatus
 
 /**
@@ -99,6 +100,8 @@ class TimeStampedHashMap[A, B] extends Map[A, B]() with spark.Logging {
     }
   }
 
+  def toMap: immutable.Map[A, B] = iterator.toMap
+
   /**
    * Removes old key-value pairs that have timestamp earlier than `threshTime`
    */
-- 
GitLab