diff --git a/core/src/main/scala/org/apache/spark/memory/StaticMemoryManager.scala b/core/src/main/scala/org/apache/spark/memory/StaticMemoryManager.scala
index cbd0fa9ec20986f61badd8063ba233d8a929a714..08155aa298ae718b781680bdb0e5ca337969713a 100644
--- a/core/src/main/scala/org/apache/spark/memory/StaticMemoryManager.scala
+++ b/core/src/main/scala/org/apache/spark/memory/StaticMemoryManager.scala
@@ -104,6 +104,8 @@ private[spark] class StaticMemoryManager(
 
 private[spark] object StaticMemoryManager {
 
+  private val MIN_MEMORY_BYTES = 32 * 1024 * 1024  // 32 MB
+
   /**
    * Return the total amount of memory available for the storage region, in bytes.
    */
@@ -119,6 +121,22 @@
    */
   private def getMaxExecutionMemory(conf: SparkConf): Long = {
     val systemMaxMemory = conf.getLong("spark.testing.memory", Runtime.getRuntime.maxMemory)
+
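+    // Fail fast if the available heap is below the minimum Spark needs to run.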
+    if (systemMaxMemory < MIN_MEMORY_BYTES) {
+      throw new IllegalArgumentException(s"System memory $systemMaxMemory bytes must " +
+        s"be at least $MIN_MEMORY_BYTES bytes. Please increase heap size using the " +
+        s"--driver-memory option or spark.driver.memory in Spark configuration.")
+    }
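+    // Apply the same floor to an explicitly configured executor memory.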
+    if (conf.contains("spark.executor.memory")) {
+      val executorMemory = conf.getSizeAsBytes("spark.executor.memory")
+      if (executorMemory < MIN_MEMORY_BYTES) {
+        throw new IllegalArgumentException(s"Executor memory $executorMemory bytes must be " +
+          s"at least $MIN_MEMORY_BYTES bytes. Please increase executor memory using the " +
+          s"--executor-memory option or spark.executor.memory in Spark configuration.")
+      }
+    }
     val memoryFraction = conf.getDouble("spark.shuffle.memoryFraction", 0.2)
     val safetyFraction = conf.getDouble("spark.shuffle.safetyFraction", 0.8)
     (systemMaxMemory * memoryFraction * safetyFraction).toLong
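
A minimal sketch, not part of the patch, showing the new floor in action. It assumes
spark-core (with this patch applied) on the classpath, and it must sit in the
org.apache.spark.memory package because StaticMemoryManager is private[spark]; the
object name MinMemoryCheckSketch is illustrative. "spark.testing.memory" is the same
hook getMaxExecutionMemory reads in place of Runtime.getRuntime.maxMemory, so setting
it below 32 MB should trip the new IllegalArgumentException.

package org.apache.spark.memory

import org.apache.spark.SparkConf

object MinMemoryCheckSketch {
  def main(args: Array[String]): Unit = {
    // 16 MB is below the 32 MB floor, so constructing the manager should fail fast.
    val conf = new SparkConf()
      .set("spark.testing.memory", (16 * 1024 * 1024).toString)
    try {
      // The auxiliary constructor calls StaticMemoryManager.getMaxExecutionMemory,
      // which now rejects heaps smaller than MIN_MEMORY_BYTES.
      new StaticMemoryManager(conf, numCores = 1)
    } catch {
      case e: IllegalArgumentException =>
        println(s"Rejected as expected: ${e.getMessage}")
    }
  }
}

Placing the validation in getMaxExecutionMemory means both floors are enforced once,
at manager construction, instead of surfacing later as an opaque allocation failure.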