diff --git a/python/pyspark/sql/conf.py b/python/pyspark/sql/conf.py
index 7428c919915f3efc04243c437247c27aade1a597..609d882a95a323057520e855bab507c86db19388 100644
--- a/python/pyspark/sql/conf.py
+++ b/python/pyspark/sql/conf.py
@@ -23,7 +23,6 @@ class RuntimeConfig(object):
     """User-facing configuration API, accessible through `SparkSession.conf`.

     Options set here are automatically propagated to the Hadoop configuration during I/O.
-    This a thin wrapper around its Scala implementation org.apache.spark.sql.RuntimeConfig.
     """

     def __init__(self, jconf):
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index fb3e318163e875554be7d5477a25d5afbd56883e..04842f6185c713ffbd3bc0a8b04ecb9a0b2b75f5 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -71,9 +71,6 @@ class SparkSession(object):
             .config("spark.some.config.option", "some-value") \
             .getOrCreate()

-    :param sparkContext: The :class:`SparkContext` backing this SparkSession.
-    :param jsparkSession: An optional JVM Scala SparkSession. If set, we do not instantiate a new
-        SparkSession in the JVM, instead we make all calls to this object.
     """

     class Builder(object):
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index 4fd6e42640d242b89eec46e937c8cbb8853539a2..7e07e0cb84a87923d74ac87f27932de9826a5ba6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -35,9 +35,8 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: String): RuntimeConfig = {
+  def set(key: String, value: String): Unit = {
     sqlConf.setConfString(key, value)
-    this
   }

   /**
@@ -45,7 +44,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: Boolean): RuntimeConfig = {
+  def set(key: String, value: Boolean): Unit = {
     set(key, value.toString)
   }

@@ -54,7 +53,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    *
    * @since 2.0.0
    */
-  def set(key: String, value: Long): RuntimeConfig = {
+  def set(key: String, value: Long): Unit = {
     set(key, value.toString)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
similarity index 85%
rename from sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala
rename to sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
index a629b73ac046d8ffac76cb561e9404224076f952..cfe2e9f2dbc4469e2bb157f68220ce8a8dfe7cfd 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
@@ -15,10 +15,9 @@
  * limitations under the License.
  */

-package org.apache.spark.sql.internal
+package org.apache.spark.sql

 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.RuntimeConfig

 class RuntimeConfigSuite extends SparkFunSuite {
@@ -26,10 +25,9 @@ class RuntimeConfigSuite extends SparkFunSuite {

   test("set and get") {
     val conf = newConf()
-    conf
-      .set("k1", "v1")
-      .set("k2", 2)
-      .set("k3", value = false)
+    conf.set("k1", "v1")
+    conf.set("k2", 2)
+    conf.set("k3", value = false)

     assert(conf.get("k1") == "v1")
     assert(conf.get("k2") == "2")
@@ -41,13 +39,15 @@ class RuntimeConfigSuite extends SparkFunSuite {
   }

   test("getOption") {
-    val conf = newConf().set("k1", "v1")
+    val conf = newConf()
+    conf.set("k1", "v1")
     assert(conf.getOption("k1") == Some("v1"))
     assert(conf.getOption("notset") == None)
   }

   test("unset") {
-    val conf = newConf().set("k1", "v1")
+    val conf = newConf()
+    conf.set("k1", "v1")
     assert(conf.get("k1") == "v1")
     conf.unset("k1")
     intercept[NoSuchElementException] {
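For reference, a minimal sketch of how the public API reads after this change. This is a hypothetical example, not part of the patch; it assumes a local SparkSession and uses two stock config keys (`spark.sql.shuffle.partitions`, `spark.sql.codegen.wholeStage`) plus an arbitrary user key to exercise the three `set` overloads:

import org.apache.spark.sql.SparkSession

object RuntimeConfigExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local").appName("conf-example").getOrCreate()

    // After this patch each set() returns Unit, so calls are standalone statements:
    spark.conf.set("spark.sql.shuffle.partitions", 10L)   // Long overload
    spark.conf.set("spark.sql.codegen.wholeStage", true)  // Boolean overload
    spark.conf.set("some.custom.key", "some-value")       // String overload

    // get() returns the stored value as a String.
    assert(spark.conf.get("spark.sql.shuffle.partitions") == "10")

    // The chained style removed by this patch would no longer compile:
    //   spark.conf.set("k1", "v1").set("k2", "v2")  // error: value set is not a member of Unit

    spark.stop()
  }
}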