Commit 4d496802 authored by Michael Gummelt, committed by Sean Owen

[SPARK-16952] don't lookup spark home directory when executor uri is set

## What changes were proposed in this pull request?

Remove the requirement to set `spark.mesos.executor.home` when `spark.executor.uri` is used.

## How was this patch tested?

Unit tests.

Author: Michael Gummelt <mgummelt@mesosphere.io>

Closes #14552 from mgummelt/fix-spark-home.
Parent commit: 7186e8c3
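For context, a minimal sketch of the configuration this change enables (the Mesos master URL and the executor tarball URI below are hypothetical placeholders): once `spark.executor.uri` points at a Spark distribution the Mesos fetcher can download, neither `spark.mesos.executor.home` nor a driver-side Spark home needs to be configured.

```scala
import org.apache.spark.{SparkConf, SparkContext}

// Minimal sketch, not taken from this patch: the master URL and the executor
// tarball URI are hypothetical placeholders.
val conf = new SparkConf()
  .setMaster("mesos://zk://zk1:2181/mesos")
  .setAppName("uri-based-executors")
  // Executors download and unpack this distribution via the Mesos fetcher,
  // so spark.mesos.executor.home is no longer required.
  .set("spark.executor.uri", "http://example.com/spark-2.0.0-bin-hadoop2.7.tgz")

val sc = new SparkContext(conf)
```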
MesosCoarseGrainedSchedulerBackend.scala:

```diff
@@ -163,11 +163,6 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
   }
 
   def createCommand(offer: Offer, numCores: Int, taskId: String): CommandInfo = {
-    val executorSparkHome = conf.getOption("spark.mesos.executor.home")
-      .orElse(sc.getSparkHome())
-      .getOrElse {
-        throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
-      }
     val environment = Environment.newBuilder()
     val extraClassPath = conf.getOption("spark.executor.extraClassPath")
     extraClassPath.foreach { cp =>
@@ -201,6 +196,11 @@ private[spark] class MesosCoarseGrainedSchedulerBackend(
       .orElse(Option(System.getenv("SPARK_EXECUTOR_URI")))
 
     if (uri.isEmpty) {
+      val executorSparkHome = conf.getOption("spark.mesos.executor.home")
+        .orElse(sc.getSparkHome())
+        .getOrElse {
+          throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
+        }
       val runScript = new File(executorSparkHome, "./bin/spark-class").getPath
       command.setValue(
         "%s \"%s\" org.apache.spark.executor.CoarseGrainedExecutorBackend"
```
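Put differently, the Spark-home lookup now happens only on the code path that actually needs it. Below is a simplified sketch of the reordered control flow; it is an illustration with condensed names, not the actual method in the patch.

```scala
// Simplified illustration: resolve Spark home lazily, only when no executor
// URI is available. Names and return value are illustrative only.
def launcherFor(conf: Map[String, String], env: Map[String, String]): String = {
  val uri = conf.get("spark.executor.uri").orElse(env.get("SPARK_EXECUTOR_URI"))
  uri match {
    case Some(_) =>
      // The distribution is fetched and unpacked by the Mesos fetcher, so
      // spark-class is run relative to the sandbox, not a local Spark home.
      "./bin/spark-class"
    case None =>
      // Only now does the missing-Spark-home error apply.
      val sparkHome = conf.getOrElse("spark.mesos.executor.home",
        throw new IllegalArgumentException(
          "Executor Spark home `spark.mesos.executor.home` is not set!"))
      s"$sparkHome/bin/spark-class"
  }
}
```

The unit test added below exercises exactly this path: `spark.executor.uri` is set, no Spark home is configured, and the launched task's command is expected to carry the URI.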
MesosCoarseGrainedSchedulerBackendSuite.scala:

```diff
@@ -370,6 +370,21 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
     verify(driverEndpoint, never()).askWithRetry(isA(classOf[RemoveExecutor]))(any[ClassTag[_]])
   }
 
+  test("mesos supports spark.executor.uri") {
+    val url = "spark.spark.spark.com"
+    setBackend(Map(
+      "spark.executor.uri" -> url
+    ), false)
+
+    val (mem, cpu) = (backend.executorMemory(sc), 4)
+
+    val offer1 = createOffer("o1", "s1", mem, cpu)
+    backend.resourceOffers(driver, List(offer1).asJava)
+
+    val launchedTasks = verifyTaskLaunched(driver, "o1")
+    assert(launchedTasks.head.getCommand.getUrisList.asScala(0).getValue == url)
+  }
+
   private def verifyDeclinedOffer(driver: SchedulerDriver,
       offerId: OfferID,
       filter: Boolean = false): Unit = {
@@ -435,13 +450,17 @@ class MesosCoarseGrainedSchedulerBackendSuite extends SparkFunSuite
     backend
   }
 
-  private def setBackend(sparkConfVars: Map[String, String] = null) {
+  private def setBackend(sparkConfVars: Map[String, String] = null,
+      setHome: Boolean = true) {
     sparkConf = (new SparkConf)
       .setMaster("local[*]")
       .setAppName("test-mesos-dynamic-alloc")
-      .setSparkHome("/path")
       .set("spark.mesos.driver.webui.url", "http://webui")
 
+    if (setHome) {
+      sparkConf.setSparkHome("/path")
+    }
+
     if (sparkConfVars != null) {
       sparkConf.setAll(sparkConfVars)
     }
```