diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index b8936314ecce282f835201c56dac6db6e99bbe1f..c978db00d9574553ea37c10dae516ce9c7a30f73 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -16,4 +16,11 @@
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker instances/processes
 #   to be spawned on every slave machine
+# - SPARK_JAVA_OPTS, to set JVM options for the executor backend. Note: this is
+#   only for node-specific options, whereas app-specific options should be set
+#   in the application.
+#   Examples of node-specific options: -Dspark.local.dir, GC-related options.
+#   Examples of app-specific options: -Dspark.serializer
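+#   A hypothetical node-level example (the path and GC flag are illustrative
+#   only; values vary per deployment):
+#   SPARK_JAVA_OPTS="-Dspark.local.dir=/mnt/spark -XX:+UseConcMarkSweepGC"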
 
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index 345dfe879cf616bd66e157695f9ed0cc7b2a1052..8f6d25c33fb196e61dca5fa2c418d53f75ea1659 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -111,6 +111,10 @@ private[spark] class ExecutorRunner(
     val libraryOpts = getAppEnv("SPARK_LIBRARY_PATH")
       .map(p => List("-Djava.library.path=" + p))
       .getOrElse(Nil)
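+    // Worker-local opts come from the worker daemon's own environment (e.g. set
+    // in spark-env.sh), not from the application's environment; they are placed
+    // before userOpts below, so duplicated app-submitted -D flags take precedence
+    // (for repeated -D flags, the JVM uses the last value given).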
+    val workerLocalOpts = Option(getenv("SPARK_JAVA_OPTS")).map(Utils.splitCommandString).getOrElse(Nil)
     val userOpts = getAppEnv("SPARK_JAVA_OPTS").map(Utils.splitCommandString).getOrElse(Nil)
     val memoryOpts = Seq("-Xms" + memory + "M", "-Xmx" + memory + "M")
 
@@ -120,7 +124,7 @@
         Seq(sparkHome + "/bin/compute-classpath" + ext),
         extraEnvironment=appDesc.command.environment)
 
-    Seq("-cp", classPath) ++ libraryOpts ++ userOpts ++ memoryOpts
+    Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
   }
 
   /** Spawn a thread that will redirect a given stream to a file */