From 6ae9fc00ed6ef530a9c42c8407fc66fd873239cc Mon Sep 17 00:00:00 2001
From: Reynold Xin <rxin@databricks.com>
Date: Wed, 4 May 2016 14:26:05 -0700
Subject: [PATCH] [SPARK-15126][SQL] RuntimeConfig.set should return Unit

## What changes were proposed in this pull request?
Currently `set` returns the RuntimeConfig itself to facilitate chaining. However, this makes the output in interactive environments (e.g. notebooks, Scala REPL) confusing, because the return value of each `set` call is displayed as a RuntimeConfig object.
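
For illustration, a minimal sketch of how caller code changes (assuming a `SparkSession` named `spark` is already in scope, as in the Spark shell; the keys are placeholders):

```scala
// Before this patch, set returned the RuntimeConfig, so calls could be chained
// (and the REPL echoed the returned RuntimeConfig after every call):
//   spark.conf.set("k1", "v1").set("k2", 2)

// After this patch, set returns Unit, so each option is set in its own statement
// and interactive shells print nothing for the call:
spark.conf.set("k1", "v1")
spark.conf.set("k2", 2)
spark.conf.set("k3", value = false)
```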

## How was this patch tested?
Updated unit tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #12902 from rxin/SPARK-15126.
---
 python/pyspark/sql/conf.py                       |  1 -
 python/pyspark/sql/session.py                    |  3 ---
 .../org/apache/spark/sql/RuntimeConfig.scala     |  7 +++----
 .../sql/{internal => }/RuntimeConfigSuite.scala  | 16 ++++++++--------
 4 files changed, 11 insertions(+), 16 deletions(-)
 rename sql/core/src/test/scala/org/apache/spark/sql/{internal => }/RuntimeConfigSuite.scala (85%)

diff --git a/python/pyspark/sql/conf.py b/python/pyspark/sql/conf.py
index 7428c91991..609d882a95 100644
--- a/python/pyspark/sql/conf.py
+++ b/python/pyspark/sql/conf.py
@@ -23,7 +23,6 @@ class RuntimeConfig(object):
     """User-facing configuration API, accessible through `SparkSession.conf`.
 
     Options set here are automatically propagated to the Hadoop configuration during I/O.
-    This a thin wrapper around its Scala implementation org.apache.spark.sql.RuntimeConfig.
     """
 
     def __init__(self, jconf):
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index fb3e318163..04842f6185 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -71,9 +71,6 @@ class SparkSession(object):
             .config("spark.some.config.option", "some-value") \
             .getOrCreate()
 
-    :param sparkContext: The :class:`SparkContext` backing this SparkSession.
-    :param jsparkSession: An optional JVM Scala SparkSession. If set, we do not instantiate a new
-        SparkSession in the JVM, instead we make all calls to this object.
     """
 
     class Builder(object):
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index 4fd6e42640..7e07e0cb84 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -35,9 +35,8 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: String): RuntimeConfig = {
+  def set(key: String, value: String): Unit = {
     sqlConf.setConfString(key, value)
-    this
   }
 
   /**
@@ -45,7 +44,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: Boolean): RuntimeConfig = {
+  def set(key: String, value: Boolean): Unit = {
     set(key, value.toString)
   }
 
@@ -54,7 +53,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: Long): RuntimeConfig = {
+  def set(key: String, value: Long): Unit = {
     set(key, value.toString)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
similarity index 85%
rename from sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
index a629b73ac0..cfe2e9f2db 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
@@ -15,10 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.internal
+package org.apache.spark.sql
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.RuntimeConfig
 
 class RuntimeConfigSuite extends SparkFunSuite {
 
@@ -26,10 +25,9 @@ class RuntimeConfigSuite extends SparkFunSuite {
 
   test("set and get") {
     val conf = newConf()
-    conf
-      .set("k1", "v1")
-      .set("k2", 2)
-      .set("k3", value = false)
+    conf.set("k1", "v1")
+    conf.set("k2", 2)
+    conf.set("k3", value = false)
 
     assert(conf.get("k1") == "v1")
     assert(conf.get("k2") == "2")
@@ -41,13 +39,15 @@ class RuntimeConfigSuite extends SparkFunSuite {
   }
 
   test("getOption") {
-    val conf = newConf().set("k1", "v1")
+    val conf = newConf()
+    conf.set("k1", "v1")
     assert(conf.getOption("k1") == Some("v1"))
     assert(conf.getOption("notset") == None)
   }
 
   test("unset") {
-    val conf = newConf().set("k1", "v1")
+    val conf = newConf()
+    conf.set("k1", "v1")
     assert(conf.get("k1") == "v1")
     conf.unset("k1")
     intercept[NoSuchElementException] {
-- 
GitLab