Commit c0cbbdea authored by CrazyJvm, committed by Aaron Davidson

SPARK-3093 : masterLock in Worker is no longer needed

There's no need to use masterLock in Worker anymore, since all communication with the master happens within the Akka actor.

Author: CrazyJvm <crazyjvm@gmail.com>

Closes #2008 from CrazyJvm/no-need-master-lock and squashes the following commits:

dd39e20 [CrazyJvm] fix format
58e7fa5 [CrazyJvm] there's no need to use masterLock now since all communications are within Akka actor
parent 9306b8c6
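
The rationale above rests on the Akka actor model: an actor processes the messages in its mailbox one at a time, so fields such as master, activeMasterUrl and connected, which are only touched from inside the Worker actor's receive loop, are never accessed concurrently, and a lock around them adds nothing. Below is a minimal sketch of that guarantee, using a hypothetical CounterActor and the classic Akka API of that era (not Spark code):

import akka.actor.{Actor, ActorSystem, Props}

// Hypothetical example, not part of Spark: `count` is a plain var with no lock,
// because Akka guarantees an actor handles only one mailbox message at a time.
class CounterActor extends Actor {
  private var count = 0  // mutated only inside receive, so no synchronization is needed

  def receive = {
    case "increment" =>
      count += 1
    case "print" =>
      println(s"count = $count")
  }
}

object CounterExample extends App {
  val system = ActorSystem("counter-example")
  val counter = system.actorOf(Props[CounterActor], "counter")

  // Even if these sends came from many threads, the messages are queued in the
  // actor's mailbox and handled sequentially, so the increments never race.
  (1 to 100).foreach(_ => counter ! "increment")
  counter ! "print"

  Thread.sleep(1000)   // crude wait so the mailbox drains before shutting down (fine for a sketch)
  system.shutdown()    // pre-2.4 Akka API, as used by Spark at the time; later classic Akka uses system.terminate()
}

The same reasoning applies to the Worker in the diff below: every master ! ... send and every assignment to the master-related fields happens inside receive, so the masterLock.synchronized wrappers removed here were redundant.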
@@ -72,7 +72,6 @@ private[spark] class Worker(
   val APP_DATA_RETENTION_SECS = conf.getLong("spark.worker.cleanup.appDataTtl", 7 * 24 * 3600)
 
   val testing: Boolean = sys.props.contains("spark.testing")
-  val masterLock: Object = new Object()
   var master: ActorSelection = null
   var masterAddress: Address = null
   var activeMasterUrl: String = ""
@@ -145,18 +144,16 @@ private[spark] class Worker(
   }
 
   def changeMaster(url: String, uiUrl: String) {
-    masterLock.synchronized {
-      activeMasterUrl = url
-      activeMasterWebUiUrl = uiUrl
-      master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl))
-      masterAddress = activeMasterUrl match {
-        case Master.sparkUrlRegex(_host, _port) =>
-          Address("akka.tcp", Master.systemName, _host, _port.toInt)
-        case x =>
-          throw new SparkException("Invalid spark URL: " + x)
-      }
-      connected = true
-    }
+    activeMasterUrl = url
+    activeMasterWebUiUrl = uiUrl
+    master = context.actorSelection(Master.toAkkaUrl(activeMasterUrl))
+    masterAddress = activeMasterUrl match {
+      case Master.sparkUrlRegex(_host, _port) =>
+        Address("akka.tcp", Master.systemName, _host, _port.toInt)
+      case x =>
+        throw new SparkException("Invalid spark URL: " + x)
+    }
+    connected = true
   }
 
   def tryRegisterAllMasters() {
@@ -199,9 +196,7 @@ private[spark] class Worker(
       }
 
     case SendHeartbeat =>
-      masterLock.synchronized {
-        if (connected) { master ! Heartbeat(workerId) }
-      }
+      if (connected) { master ! Heartbeat(workerId) }
 
     case WorkDirCleanup =>
       // Spin up a separate thread (in a future) to do the dir cleanup; don't tie up worker actor
@@ -244,9 +239,7 @@ private[spark] class Worker(
           manager.start()
          coresUsed += cores_
          memoryUsed += memory_
-          masterLock.synchronized {
-            master ! ExecutorStateChanged(appId, execId, manager.state, None, None)
-          }
+          master ! ExecutorStateChanged(appId, execId, manager.state, None, None)
        } catch {
          case e: Exception => {
            logError("Failed to launch executor %s/%d for %s".format(appId, execId, appDesc.name))
@@ -254,17 +247,13 @@ private[spark] class Worker(
              executors(appId + "/" + execId).kill()
              executors -= appId + "/" + execId
            }
-            masterLock.synchronized {
-              master ! ExecutorStateChanged(appId, execId, ExecutorState.FAILED, None, None)
-            }
+            master ! ExecutorStateChanged(appId, execId, ExecutorState.FAILED, None, None)
          }
        }
      }
 
    case ExecutorStateChanged(appId, execId, state, message, exitStatus) =>
-      masterLock.synchronized {
-        master ! ExecutorStateChanged(appId, execId, state, message, exitStatus)
-      }
+      master ! ExecutorStateChanged(appId, execId, state, message, exitStatus)
      val fullId = appId + "/" + execId
      if (ExecutorState.isFinished(state)) {
        executors.get(fullId) match {
@@ -330,9 +319,7 @@ private[spark] class Worker(
        case _ =>
          logDebug(s"Driver $driverId changed state to $state")
      }
-      masterLock.synchronized {
-        master ! DriverStateChanged(driverId, state, exception)
-      }
+      master ! DriverStateChanged(driverId, state, exception)
      val driver = drivers.remove(driverId).get
      finishedDrivers(driverId) = driver
      memoryUsed -= driver.driverDesc.mem