From cdaa562c9a09e2e83e6df4e84d911ce1428a7a7c Mon Sep 17 00:00:00 2001
From: Sean Owen <sowen@cloudera.com>
Date: Sat, 13 Aug 2016 15:40:43 -0700
Subject: [PATCH] [SPARK-16966][SQL][CORE] App Name is a randomUUID even when
 "spark.app.name" exists

## What changes were proposed in this pull request?

Don't override an app name specified in `SparkConf` with a random app name. Only set a random name if the conf still has no app name after the builder options have been applied.

See also https://github.com/apache/spark/pull/14602
This is similar to Sherry302's original proposal in https://github.com/apache/spark/pull/14556
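
For illustration, a minimal sketch of the affected scenario (`myApp` and the local master are example values only; `spark.app.name` could equally come from `spark-submit --name`):

```scala
import org.apache.spark.sql.SparkSession

// spark.app.name is already configured before the builder runs, e.g. via
// spark-submit --name or, as here, a spark.* system property picked up by SparkConf.
System.setProperty("spark.app.name", "myApp")

val spark = SparkSession.builder()
  .master("local")
  .getOrCreate()

// With this change the configured name is kept; previously it was replaced with a
// random UUID because the builder options themselves did not contain an app name.
assert(spark.sparkContext.getConf.get("spark.app.name") == "myApp")
spark.stop()
```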

## How was this patch tested?

Jenkins tests, with a new test case reproducing the bug.
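
The new assertion covers roughly the following scenario (a sketch, not the exact suite code: it assumes a test that first creates a `SparkContext` with an explicit app name and then builds a `SparkSession` on top of it):

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession

// A SparkContext already exists with an explicit app name ...
val conf = new SparkConf().setAppName("test").setMaster("local")
val sc = new SparkContext(conf)

// ... and a SparkSession is then built on top of it; getOrCreate() reuses the context.
val session = SparkSession.builder().config("key2", "value2").getOrCreate()

// The explicit name must survive. Before the fix, the builder wrote a random UUID
// into the existing context's conf because its own options had no app name.
assert(session.sparkContext.getConf.get("spark.app.name") == "test")
session.stop()
```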

Author: Sean Owen <sowen@cloudera.com>

Closes #14630 from srowen/SPARK-16966.2.
---
 .../scala/org/apache/spark/sql/SparkSession.scala     | 11 +++++++----
 .../apache/spark/sql/SparkSessionBuilderSuite.scala   |  1 +
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 2ade36d075..362bf45d03 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -816,16 +816,19 @@ object SparkSession {
         // No active nor global default session. Create a new one.
         val sparkContext = userSuppliedContext.getOrElse {
           // set app name if not given
-          if (!options.contains("spark.app.name")) {
-            options += "spark.app.name" -> java.util.UUID.randomUUID().toString
-          }
-
+          val randomAppName = java.util.UUID.randomUUID().toString
           val sparkConf = new SparkConf()
           options.foreach { case (k, v) => sparkConf.set(k, v) }
+          if (!sparkConf.contains("spark.app.name")) {
+            sparkConf.setAppName(randomAppName)
+          }
           val sc = SparkContext.getOrCreate(sparkConf)
           // maybe this is an existing SparkContext, update its SparkConf which maybe used
           // by SparkSession
           options.foreach { case (k, v) => sc.conf.set(k, v) }
+          if (!sc.conf.contains("spark.app.name")) {
+            sc.conf.setAppName(randomAppName)
+          }
           sc
         }
         session = new SparkSession(sparkContext)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 418345b9ee..386d13d07a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -100,6 +100,7 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
     assert(session.conf.get("key2") == "value2")
     assert(session.sparkContext.conf.get("key1") == "value1")
     assert(session.sparkContext.conf.get("key2") == "value2")
+    assert(session.sparkContext.conf.get("spark.app.name") == "test")
     session.stop()
   }
 
-- 
GitLab