From 0bdbefe9dd1e7c95c58ea6b52d3b264794abbc0e Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin <vanzin@cloudera.com>
Date: Fri, 1 Sep 2017 10:29:36 -0700
Subject: [PATCH] [SPARK-21728][CORE] Follow-up: fix user config, auth in
 SparkSubmit logging.

- SecurityManager throws an exception when authentication is enabled but no
  secret is defined. SparkSubmit does not use the SecurityManager's auth
  functionality, so set a dummy secret to work around the exception.
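
  For reference, a minimal sketch of the failure mode this works around
  (illustrative only, not part of the patch; SecurityManager is
  private[spark], so this would have to run from Spark-internal code, and
  it assumes a non-YARN setup where the secret must come from the conf):

      import org.apache.spark.{SecurityManager, SparkConf}

      val conf = new SparkConf().set("spark.authenticate", "true")

      // No spark.authenticate.secret is set, so constructing the
      // SecurityManager throws an IllegalArgumentException complaining
      // that a secret key must be specified:
      // val broken = new SecurityManager(conf)

      // The workaround in this patch: any dummy value will do, because
      // SparkSubmit never actually authenticates with this SecurityManager.
      conf.set("spark.authenticate.secret", "unused")
      val secMgr = new SecurityManager(conf)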

- Only reset the log4j configuration when Spark was the one that initialized
  it; otherwise, user-defined log configuration may be lost.
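
  To illustrate (a hypothetical driver-side scenario, not part of the
  patch; the config path and the INFO level below are placeholders), this
  is the case the new flag protects:

      import org.apache.log4j.{Level, LogManager, PropertyConfigurator}

      // The user installs their own log4j 1.2 config up front. The root
      // logger then has appenders, so Spark's Logging trait considers
      // log4j already initialized and leaves the new
      // defaultSparkLog4jConfig flag false.
      PropertyConfigurator.configure("/path/to/log4j.properties")

      // ... Spark runs, possibly changing the root logger level ...

      // On uninitialize(), the old code called
      // LogManager.resetConfiguration(), wiping the user's appenders; the
      // new code only restores the saved root level, for example:
      LogManager.getRootLogger().setLevel(Level.INFO)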

Tested on a secured YARN cluster with the log config file posted to the bug.

Author: Marcelo Vanzin <vanzin@cloudera.com>

Closes #19089 from vanzin/SPARK-21728.
---
 .../main/scala/org/apache/spark/deploy/SparkSubmit.scala | 3 +++
 .../main/scala/org/apache/spark/internal/Logging.scala   | 9 ++++++++-
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 38604fe939..ea9c9bdaed 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -352,6 +352,9 @@ object SparkSubmit extends CommandLineUtils with Logging {
     var localJars: String = null
     var localPyFiles: String = null
     if (deployMode == CLIENT) {
+      // This security manager will not need an auth secret, but set a dummy value in case
+      // spark.authenticate is enabled; otherwise an exception is thrown.
+      sparkConf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, "unused")
       val secMgr = new SecurityManager(sparkConf)
       localPrimaryResource = Option(args.primaryResource).map {
         downloadFile(_, targetDir, sparkConf, hadoopConf, secMgr)
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index cea9964ea8..c0d709ad25 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -120,6 +120,7 @@ trait Logging {
       val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
       // scalastyle:off println
       if (!log4j12Initialized) {
+        Logging.defaultSparkLog4jConfig = true
         val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
         Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
           case Some(url) =>
@@ -164,6 +165,7 @@ trait Logging {
 private[spark] object Logging {
   @volatile private var initialized = false
   @volatile private var defaultRootLevel: Level = null
+  @volatile private var defaultSparkLog4jConfig = false
 
   val initLock = new Object()
   try {
@@ -186,7 +188,12 @@ private[spark] object Logging {
    */
   def uninitialize(): Unit = initLock.synchronized {
     if (isLog4j12()) {
-      LogManager.resetConfiguration()
+      if (defaultSparkLog4jConfig) {
+        defaultSparkLog4jConfig = false
+        LogManager.resetConfiguration()
+      } else {
+        LogManager.getRootLogger().setLevel(defaultRootLevel)
+      }
     }
     this.initialized = false
   }
-- 
GitLab