Skip to content
Snippets Groups Projects
Commit c0a0df32 authored by Matei Zaharia's avatar Matei Zaharia
Browse files

Made the default cache BoundedMemoryCache, and reduced its default size

parent a766780f
No related branches found
No related tags found
No related merge requests found
@@ -46,7 +46,7 @@ class BoundedMemoryCache extends Cache with Logging {
   private def getMaxBytes(): Long = {
     val memoryFractionToUse = System.getProperty(
-      "spark.boundedMemoryCache.memoryFraction", "0.75").toDouble
+      "spark.boundedMemoryCache.memoryFraction", "0.66").toDouble
     (Runtime.getRuntime.totalMemory * memoryFractionToUse).toLong
   }
@@ -21,7 +21,7 @@ object SparkEnv {
   }
   def createFromSystemProperties(isMaster: Boolean): SparkEnv = {
-    val cacheClass = System.getProperty("spark.cache.class", "spark.SoftReferenceCache")
+    val cacheClass = System.getProperty("spark.cache.class", "spark.BoundedMemoryCache")
     val cache = Class.forName(cacheClass).newInstance().asInstanceOf[Cache]
     val serializerClass = System.getProperty("spark.serializer", "spark.JavaSerializer")
Loading...
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment