diff --git a/core/src/main/scala/spark/BoundedMemoryCache.scala b/core/src/main/scala/spark/BoundedMemoryCache.scala
index 5ea5c303bd171cc6346ce9df7b4b4e66ccd03c83..6fe0b94297e6aa74eb294187e542a8881fad5a39 100644
--- a/core/src/main/scala/spark/BoundedMemoryCache.scala
+++ b/core/src/main/scala/spark/BoundedMemoryCache.scala
@@ -91,10 +91,14 @@ class BoundedMemoryCache(maxBytes: Long) extends Cache with Logging {
   protected def reportEntryDropped(datasetId: Any, partition: Int, entry: Entry) {
     logInfo("Dropping key (%s, %d) of size %d to make space".format(datasetId, partition, entry.size))
     // TODO: remove BoundedMemoryCache
-    datasetId match {
-      case rddDatasetId: (Int, Int) =>
-        SparkEnv.get.cacheTracker.dropEntry(rddDatasetId._2, partition)
-      case _ =>
+
+    val (keySpaceId, innerDatasetId) = datasetId.asInstanceOf[(Any, Any)]
+    innerDatasetId match {
+      case rddId: Int =>
+        SparkEnv.get.cacheTracker.dropEntry(rddId, partition)
+      case broadcastUUID: java.util.UUID =>
+        // TODO: Maybe something should be done if the broadcast variable falls out of cache
+      case _ =>
     }    
   }
 }