Skip to content
Snippets Groups Projects
Commit 8da1012f authored by Patrick Wendell's avatar Patrick Wendell
Browse files

TODO clean-up

parent faefea3f
No related branches found
No related tags found
No related merge requests found
...@@ -622,7 +622,6 @@ class SparkContext( ...@@ -622,7 +622,6 @@ class SparkContext(
} else { } else {
val uri = new URI(path) val uri = new URI(path)
key = uri.getScheme match { key = uri.getScheme match {
// TODO: Have this load jars that are available on the driver
// A JAR file which exists only on the driver node // A JAR file which exists only on the driver node
case null | "file" => case null | "file" =>
if (SparkHadoopUtil.get.isYarnMode()) { if (SparkHadoopUtil.get.isYarnMode()) {
......
...@@ -70,8 +70,9 @@ object DriverClient extends Logging { ...@@ -70,8 +70,9 @@ object DriverClient extends Logging {
driverArgs.cmd match { driverArgs.cmd match {
case "launch" => case "launch" =>
// TODO: Could modify env here to pass a flag indicating Spark is in deploy-driver mode // TODO: We could add an env variable here and intercept it in `sc.addJar` that would
// then use that to load jars locally (e.g. truncate the filesystem path) // truncate filesystem paths similar to what YARN does. For now, we just require
// people call `addJar` assuming the jar is in the same directory.
val env = Map[String, String]() val env = Map[String, String]()
System.getenv().foreach{case (k, v) => env(k) = v} System.getenv().foreach{case (k, v) => env(k) = v}
......
...@@ -58,7 +58,7 @@ private[spark] class DriverRunner( ...@@ -58,7 +58,7 @@ private[spark] class DriverRunner(
val localJarFilename = downloadUserJar(driverDir) val localJarFilename = downloadUserJar(driverDir)
// Make sure user application jar is on the classpath // Make sure user application jar is on the classpath
// TODO: This could eventually exploit ability for driver to add jars // TODO: If we add ability to submit multiple jars they should also be added here
val env = Map(driverDesc.command.environment.toSeq: _*) val env = Map(driverDesc.command.environment.toSeq: _*)
env("SPARK_CLASSPATH") = env.getOrElse("SPARK_CLASSPATH", "") + s":$localJarFilename" env("SPARK_CLASSPATH") = env.getOrElse("SPARK_CLASSPATH", "") + s":$localJarFilename"
val newCommand = Command(driverDesc.command.mainClass, val newCommand = Command(driverDesc.command.mainClass,
......
...@@ -50,7 +50,7 @@ private[spark] class ExecutorRunner( ...@@ -50,7 +50,7 @@ private[spark] class ExecutorRunner(
var workerThread: Thread = null var workerThread: Thread = null
var process: Process = null var process: Process = null
// NOTE: This is now redundant with the automated shut-down enforced by the Executor. It mike // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It might
// make sense to remove this in the future. // make sense to remove this in the future.
var shutdownHook: Thread = null var shutdownHook: Thread = null
......
...@@ -19,7 +19,7 @@ private[spark] class WorkerWatcher(workerUrl: String) extends Actor with Logging ...@@ -19,7 +19,7 @@ private[spark] class WorkerWatcher(workerUrl: String) extends Actor with Logging
worker ! SendHeartbeat // need to send a message here to initiate connection worker ! SendHeartbeat // need to send a message here to initiate connection
} }
// Lets us filter events only from the worker actor // Lets us filter events only from the worker's actor system
private val expectedHostPort = AddressFromURIString(workerUrl).hostPort private val expectedHostPort = AddressFromURIString(workerUrl).hostPort
private def isWorker(address: Address) = address.hostPort == expectedHostPort private def isWorker(address: Address) = address.hostPort == expectedHostPort
......
0% — Loading, or loading failed.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment