diff --git a/core/src/main/scala/spark/BoundedMemoryCache.scala b/core/src/main/scala/spark/BoundedMemoryCache.scala
index 10143d3dd22bb24ab2a05044facd84f5636cea70..6f2f92f6051f7701f621cee3419e1d853c44677c 100644
--- a/core/src/main/scala/spark/BoundedMemoryCache.scala
+++ b/core/src/main/scala/spark/BoundedMemoryCache.scala
@@ -46,7 +46,7 @@ class BoundedMemoryCache extends Cache with Logging {
 
   private def getMaxBytes(): Long = {
     val memoryFractionToUse = System.getProperty(
-      "spark.boundedMemoryCache.memoryFraction", "0.75").toDouble
+      "spark.boundedMemoryCache.memoryFraction", "0.66").toDouble
     (Runtime.getRuntime.totalMemory * memoryFractionToUse).toLong
   }
 
diff --git a/core/src/main/scala/spark/SparkEnv.scala b/core/src/main/scala/spark/SparkEnv.scala
index 81caf7cff05928ea8c653d57f2bfcfdac5132fe6..d37d1aa90912b2a4ae78da0c0dc5e6b9fa6e1fa1 100644
--- a/core/src/main/scala/spark/SparkEnv.scala
+++ b/core/src/main/scala/spark/SparkEnv.scala
@@ -21,7 +21,7 @@ object SparkEnv {
   }
 
   def createFromSystemProperties(isMaster: Boolean): SparkEnv = {
-    val cacheClass = System.getProperty("spark.cache.class", "spark.SoftReferenceCache")
+    val cacheClass = System.getProperty("spark.cache.class", "spark.BoundedMemoryCache")
     val cache = Class.forName(cacheClass).newInstance().asInstanceOf[Cache]
 
     val serializerClass = System.getProperty("spark.serializer", "spark.JavaSerializer")
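
Net effect of this change: BoundedMemoryCache replaces SoftReferenceCache as the default cache implementation, and its cap drops from 75% to 66% of the JVM heap. A minimal Scala sketch of how the two patched defaults resolve at runtime (the CacheDefaultsDemo object and its println output are illustration only, not part of the patch):

object CacheDefaultsDemo {
  def main(args: Array[String]): Unit = {
    // Same lookup pattern as the patched getMaxBytes(): a system property
    // with a hard-coded fallback, which this change lowers to 0.66.
    val fraction = System.getProperty(
      "spark.boundedMemoryCache.memoryFraction", "0.66").toDouble

    // Cap = current JVM heap size times the configured fraction.
    val maxBytes = (Runtime.getRuntime.totalMemory * fraction).toLong
    println(s"BoundedMemoryCache would cap itself at $maxBytes bytes")

    // Same lookup pattern as the patched createFromSystemProperties().
    // Callers who want the old behavior can opt back in before SparkEnv
    // is created, e.g.:
    //   System.setProperty("spark.cache.class", "spark.SoftReferenceCache")
    val cacheClass = System.getProperty("spark.cache.class", "spark.BoundedMemoryCache")
    println(s"SparkEnv would instantiate: $cacheClass")
  }
}

Presumably, lowering the fraction to 0.66 leaves roughly a third of the heap for non-cache allocations, which matters more once the size-bounded cache is the default instead of one backed by collectible soft references.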