From 58072a7340e20251ed810457bc67a79f106bae42 Mon Sep 17 00:00:00 2001
From: Charles Reiss <charles@eecs.berkeley.edu>
Date: Tue, 1 Jan 2013 07:59:16 -0800
Subject: [PATCH] Remove some dead comments

---
 core/src/main/scala/spark/CacheTracker.scala | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/core/src/main/scala/spark/CacheTracker.scala b/core/src/main/scala/spark/CacheTracker.scala
index c8c4063cad..04c26b2e40 100644
--- a/core/src/main/scala/spark/CacheTracker.scala
+++ b/core/src/main/scala/spark/CacheTracker.scala
@@ -204,17 +204,11 @@ private[spark] class CacheTracker(actorSystem: ActorSystem, isMaster: Boolean, b
         }
         try {
           // If we got here, we have to load the split
-          // Tell the master that we're doing so
-          //val host = System.getProperty("spark.hostname", Utils.localHostName)
-          //val future = trackerActor !! AddedToCache(rdd.id, split.index, host)
-          // TODO: fetch any remote copy of the split that may be available
-          // TODO: also register a listener for when it unloads
           val elements = new ArrayBuffer[Any]
           logInfo("Computing partition " + split)
           elements ++= rdd.compute(split, context)
           // Try to put this block in the blockManager
           blockManager.put(key, elements, storageLevel, true)
-          //future.apply() // Wait for the reply from the cache tracker
           return elements.iterator.asInstanceOf[Iterator[T]]
         } finally {
           loading.synchronized {
--
GitLab