diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 41f810dde64ec73bf69c92213fbffac282e0f0ff..ad3337d94c921621fc9e858d3d2066bfaf63717f 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -622,7 +622,6 @@ class SparkContext(
       } else {
         val uri = new URI(path)
         key = uri.getScheme match {
-          // TODO: Have this load jars that are available on the driver
           // A JAR file which exists only on the driver node
           case null | "file" =>
             if (SparkHadoopUtil.get.isYarnMode()) {
diff --git a/core/src/main/scala/org/apache/spark/deploy/client/DriverClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/DriverClient.scala
index a5f57637fb921d58d6ab3ec9e07127f9a37ef1ad..8a4cdf07bbbaf169fb783ad5ad1e776bf8e9f1d9 100644
--- a/core/src/main/scala/org/apache/spark/deploy/client/DriverClient.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/client/DriverClient.scala
@@ -70,8 +70,9 @@ object DriverClient extends Logging {
 
     driverArgs.cmd match {
       case "launch" =>
-        // TODO: Could modify env here to pass a flag indicating Spark is in deploy-driver mode
-        //       then use that to load jars locally (e.g. truncate the filesystem path)
+        // TODO: We could set an env variable here that `sc.addJar` intercepts to truncate
+        //       filesystem paths, similar to what YARN does. For now, we just require that
+        //       users call `addJar` assuming the jar is in the same directory.
         val env = Map[String, String]()
         System.getenv().foreach{case (k, v) => env(k) = v}
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
index 41500bb28d7d05a4f46a3c45c5def7dfe3b33d2c..51baa35018597bbd1a09bf69e6681b4477b03a69 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala
@@ -58,7 +58,7 @@ private[spark] class DriverRunner(
           val localJarFilename = downloadUserJar(driverDir)
 
           // Make sure user application jar is on the classpath
-          // TODO: This could eventually exploit ability for driver to add jars
+          // TODO: If we add the ability to submit multiple jars, they should also be added here
           val env = Map(driverDesc.command.environment.toSeq: _*)
           env("SPARK_CLASSPATH") = env.getOrElse("SPARK_CLASSPATH", "") + s":$localJarFilename"
           val newCommand = Command(driverDesc.command.mainClass,
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
index 2e61d394ea02b5e8dde34fafe13bc3b92222e2a6..fdc9a34886eb8037f98e8e155d19fd663641fa0a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
@@ -50,7 +50,7 @@ private[spark] class ExecutorRunner(
   var workerThread: Thread = null
   var process: Process = null
 
-  // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It mike
+  // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It might
   // make sense to remove this in the future.
   var shutdownHook: Thread = null
 
diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
index e4352f1ce2ebfd855fc4c3edc271102b55ebb95b..f4184bc5db17b288e9c4f674adb6dd613971a2c2 100644
--- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerWatcher.scala
@@ -19,7 +19,7 @@ private[spark] class WorkerWatcher(workerUrl: String) extends Actor with Logging
     worker ! SendHeartbeat // need to send a message here to initiate connection
   }
 
-  // Lets us filter events only from the worker actor
+  // Lets us filter events only from the worker's actor system
   private val expectedHostPort = AddressFromURIString(workerUrl).hostPort
   private def isWorker(address: Address) = address.hostPort == expectedHostPort