From c0a0df3285d88aa8e7d8a8d12a53e157973ce01e Mon Sep 17 00:00:00 2001
From: Matei Zaharia <matei@eecs.berkeley.edu>
Date: Thu, 9 Feb 2012 22:32:02 -0800
Subject: [PATCH] Make BoundedMemoryCache the default cache and reduce its
 default memory fraction from 0.75 to 0.66

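Previously, SparkEnv defaulted to spark.SoftReferenceCache, which is
backed by soft references that the JVM clears only when it is already
under memory pressure. BoundedMemoryCache instead enforces an explicit
cap, computed as a fraction of Runtime.getRuntime.totalMemory; this
change also lowers that fraction from 0.75 to 0.66, presumably to leave
more heap for task execution.

Both defaults remain overridable through system properties. A minimal
sketch (the property names come from the code below; the values shown
simply restore the old behavior):

    // Illustrative only: restore the previous defaults. These must be
    // set before the SparkEnv is created from system properties.
    System.setProperty("spark.cache.class", "spark.SoftReferenceCache")
    System.setProperty("spark.boundedMemoryCache.memoryFraction", "0.75")

As a worked example, on a 1024 MB heap the new default caps the cache
at roughly 1024 * 0.66 = 675 MB, versus 768 MB under the old 0.75
default.
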
---
 core/src/main/scala/spark/BoundedMemoryCache.scala | 2 +-
 core/src/main/scala/spark/SparkEnv.scala           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/spark/BoundedMemoryCache.scala b/core/src/main/scala/spark/BoundedMemoryCache.scala
index 10143d3dd2..6f2f92f605 100644
--- a/core/src/main/scala/spark/BoundedMemoryCache.scala
+++ b/core/src/main/scala/spark/BoundedMemoryCache.scala
@@ -46,7 +46,7 @@ class BoundedMemoryCache extends Cache with Logging {
 
   private def getMaxBytes(): Long = {
     val memoryFractionToUse = System.getProperty(
-      "spark.boundedMemoryCache.memoryFraction", "0.75").toDouble
+      "spark.boundedMemoryCache.memoryFraction", "0.66").toDouble
     (Runtime.getRuntime.totalMemory * memoryFractionToUse).toLong
   }
 
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index 81caf7cff0..d37d1aa909 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -21,7 +21,7 @@ object SparkEnv {
   }
 
   def createFromSystemProperties(isMaster: Boolean): SparkEnv = {
-    val cacheClass = System.getProperty("spark.cache.class", "spark.SoftReferenceCache")
+    val cacheClass = System.getProperty("spark.cache.class", "spark.BoundedMemoryCache")
     val cache = Class.forName(cacheClass).newInstance().asInstanceOf[Cache]
     
     val serializerClass = System.getProperty("spark.serializer", "spark.JavaSerializer")
-- 
GitLab