diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
index db5b774806f94f00939974c6a46ffb26103092d6..e08729510926b440e65d3c8a9194cf0289f09d1f 100644
--- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
+++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
@@ -64,7 +64,7 @@ private[spark] class CoarseGrainedExecutorBackend(
         // Always receive `true`. Just ignore it
       case Failure(e) =>
         logError(s"Cannot register with driver: $driverUrl", e)
-        exitExecutor()
+        exitExecutor(1)
     }(ThreadUtils.sameThread)
   }
 
@@ -81,12 +81,12 @@ private[spark] class CoarseGrainedExecutorBackend(
 
     case RegisterExecutorFailed(message) =>
       logError("Slave registration failed: " + message)
-      exitExecutor()
+      exitExecutor(1)
 
     case LaunchTask(data) =>
       if (executor == null) {
         logError("Received LaunchTask command but executor was null")
-        exitExecutor()
+        exitExecutor(1)
       } else {
         val taskDesc = ser.deserialize[TaskDescription](data.value)
         logInfo("Got assigned task " + taskDesc.taskId)
@@ -97,7 +97,7 @@ private[spark] class CoarseGrainedExecutorBackend(
     case KillTask(taskId, _, interruptThread) =>
       if (executor == null) {
         logError("Received KillTask command but executor was null")
-        exitExecutor()
+        exitExecutor(1)
       } else {
         executor.killTask(taskId, interruptThread)
       }
@@ -127,7 +127,7 @@ private[spark] class CoarseGrainedExecutorBackend(
       logInfo(s"Driver from $remoteAddress disconnected during shutdown")
     } else if (driver.exists(_.address == remoteAddress)) {
       logError(s"Driver $remoteAddress disassociated! Shutting down.")
-      exitExecutor()
+      exitExecutor(1)
     } else {
       logWarning(s"An unknown ($remoteAddress) driver disconnected.")
     }
@@ -146,7 +146,7 @@ private[spark] class CoarseGrainedExecutorBackend(
    * executor exits differently. For e.g. when an executor goes down,
    * back-end may not want to take the parent process down.
    */
-  protected def exitExecutor(): Unit = System.exit(1)
+  protected def exitExecutor(code: Int): Unit = System.exit(code)
 }
 
 private[spark] object CoarseGrainedExecutorBackend extends Logging {
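
The diff moves the exit code from inside exitExecutor (previously hard-coded as System.exit(1)) to its call sites, so child backends that override the hook can see the code and decide how, or whether, to terminate the process, as the scaladoc on exitExecutor notes. The following is a minimal, standalone sketch of that pattern, not part of the patch; the class names are hypothetical stand-ins for CoarseGrainedExecutorBackend and a subclass of it.

// Standalone sketch (assumed names, not real Spark types) of the override pattern
// that the new exitExecutor(code: Int) signature enables.
object ExitExecutorSketch {

  abstract class BackendBase {
    // Mirrors the patched method: the exit code is supplied by the caller instead of
    // being hard-coded to 1 inside the method body.
    protected def exitExecutor(code: Int): Unit = System.exit(code)

    // A fatal-error path analogous to the exitExecutor(1) call sites in the diff.
    def onFatalError(message: String): Unit = {
      Console.err.println(s"ERROR: $message")
      exitExecutor(1)
    }
  }

  // Default behaviour: exit the JVM, as CoarseGrainedExecutorBackend does.
  class ProcessBackend extends BackendBase

  // A backend embedded in a larger JVM may not want to take the parent process down,
  // which is the overload case the scaladoc on exitExecutor describes.
  class InProcessBackend extends BackendBase {
    override protected def exitExecutor(code: Int): Unit =
      Console.err.println(s"Executor requested exit with code $code; not calling System.exit.")
  }

  def main(args: Array[String]): Unit = {
    // Logs the error and returns instead of killing the process.
    new InProcessBackend().onFatalError("driver disassociated")
  }
}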