Skip to content
Snippets Groups Projects
Commit 38bc6018 authored by Michael Gummelt's avatar Michael Gummelt Committed by Andrew Or
Browse files

[SPARK-5095] Fix style in mesos coarse grained scheduler code

andrewor14 This addressed your style comments from #10993

Author: Michael Gummelt <mgummelt@mesosphere.io>

Closes #11187 from mgummelt/fix_mesos_style.
parent 42d65681
No related branches found
No related tags found
No related merge requests found
......@@ -124,6 +124,7 @@ private[spark] class CoarseMesosSchedulerBackend(
}
}
// This method is factored out for testability
protected def getShuffleClient(): MesosExternalShuffleClient = {
new MesosExternalShuffleClient(
SparkTransportConf.fromSparkConf(conf, "shuffle"),
......@@ -518,10 +519,11 @@ private[spark] class CoarseMesosSchedulerBackend(
* Called when a slave is lost or a Mesos task finished. Updates local view on
* what tasks are running. It also notifies the driver that an executor was removed.
*/
private def executorTerminated(d: SchedulerDriver,
slaveId: String,
taskId: String,
reason: String): Unit = {
private def executorTerminated(
d: SchedulerDriver,
slaveId: String,
taskId: String,
reason: String): Unit = {
stateLock.synchronized {
removeExecutor(taskId, SlaveLost(reason))
slaves(slaveId).taskIDs.remove(taskId)
......
......@@ -41,12 +41,12 @@ class CoarseMesosSchedulerBackendSuite extends SparkFunSuite
with MockitoSugar
with BeforeAndAfter {
var sparkConf: SparkConf = _
var driver: SchedulerDriver = _
var taskScheduler: TaskSchedulerImpl = _
var backend: CoarseMesosSchedulerBackend = _
var externalShuffleClient: MesosExternalShuffleClient = _
var driverEndpoint: RpcEndpointRef = _
private var sparkConf: SparkConf = _
private var driver: SchedulerDriver = _
private var taskScheduler: TaskSchedulerImpl = _
private var backend: CoarseMesosSchedulerBackend = _
private var externalShuffleClient: MesosExternalShuffleClient = _
private var driverEndpoint: RpcEndpointRef = _
test("mesos supports killing and limiting executors") {
setBackend()
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.