Commit 5634fadb authored by vinodkc, committed by gatorsmile

[SPARK-21588][SQL] SQLContext.getConf(key, null) should return null


## What changes were proposed in this pull request?

In `SQLContext.getConf(key, null)`, passing `null` as the default value for a key that is not defined in the conf throws an NPE. It happens only when the conf entry has a value converter.

Added a null check on `defaultValue` inside `SQLConf.getConfString` to avoid calling `entry.valueConverter(defaultValue)` with a `null` argument.
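
For illustration, a minimal sketch of the affected call path; it assumes a local `SparkSession` and uses `spark.sql.shuffle.partitions` only because it is a registered SQLConf entry with a value converter:

```scala
import org.apache.spark.sql.SparkSession

object Spark21588Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("SPARK-21588 sketch")
      .getOrCreate()

    // Registered entry with a value converter: before this patch, passing null as
    // the default hit the converter and triggered the NPE described above; with
    // the patch the converter is skipped and the stored value is returned.
    spark.conf.set("spark.sql.shuffle.partitions", "1")
    println(spark.conf.get("spark.sql.shuffle.partitions", null)) // "1"

    // Unregistered key: no entry, no converter, so the null default is returned as-is.
    println(spark.conf.get("spark.sql.nonexistent", null)) // null

    spark.stop()
  }
}
```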

## How was this patch tested?
Added unit test

Author: vinodkc <vinod.kc.in@gmail.com>

Closes #18852 from vinodkc/br_Fix_SPARK-21588.

(cherry picked from commit 1ba967b2)
Signed-off-by: gatorsmile <gatorsmile@gmail.com>
parent 734b144d
@@ -917,10 +917,12 @@ class SQLConf extends Serializable with Logging {
    * not set yet, return `defaultValue`.
    */
   def getConfString(key: String, defaultValue: String): String = {
-    val entry = sqlConfEntries.get(key)
-    if (entry != null && defaultValue != "<undefined>") {
-      // Only verify configs in the SQLConf object
-      entry.valueConverter(defaultValue)
+    if (defaultValue != null && defaultValue != "<undefined>") {
+      val entry = sqlConfEntries.get(key)
+      if (entry != null) {
+        // Only verify configs in the SQLConf object
+        entry.valueConverter(defaultValue)
+      }
     }
     Option(settings.get(key)).getOrElse(defaultValue)
   }
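
To make the new control flow easy to follow outside of Spark, here is a small self-contained sketch; `ConfEntry`, `entries`, and `settings` are hypothetical stand-ins for SQLConf's internals, but the guard mirrors the patched `getConfString`:

```scala
object GetConfStringGuardSketch {
  // Hypothetical stand-in for SQLConf's config entry type: only the converter matters here.
  final case class ConfEntry(valueConverter: String => Any)

  // Stand-ins for sqlConfEntries and settings.
  private val entries: Map[String, ConfEntry] =
    Map("spark.sql.shuffle.partitions" -> ConfEntry(_.toInt))
  private val settings = new java.util.concurrent.ConcurrentHashMap[String, String]()

  def getConfString(key: String, defaultValue: String): String = {
    // The converter runs only when the caller supplied a real default,
    // so a null default can no longer reach it.
    if (defaultValue != null && defaultValue != "<undefined>") {
      entries.get(key).foreach(_.valueConverter(defaultValue))
    }
    Option(settings.get(key)).getOrElse(defaultValue)
  }

  def main(args: Array[String]): Unit = {
    settings.put("spark.sql.shuffle.partitions", "1")
    assert(getConfString("spark.sql.shuffle.partitions", null) == "1")
    assert(getConfString("spark.sql.nonexistent", null) == null)
    assert(getConfString("spark.sql.nonexistent", "<undefined>") == "<undefined>")
    println("guard behaves like the patched getConfString")
  }
}
```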
@@ -270,4 +270,15 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     val e2 = intercept[AnalysisException](spark.conf.unset(SCHEMA_STRING_LENGTH_THRESHOLD.key))
     assert(e2.message.contains("Cannot modify the value of a static config"))
   }
+
+  test("SPARK-21588 SQLContext.getConf(key, null) should return null") {
+    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, null))
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, "<undefined>"))
+    }
+
+    assert(spark.conf.getOption("spark.sql.nonexistent").isEmpty)
+    assert(null == spark.conf.get("spark.sql.nonexistent", null))
+    assert("<undefined>" == spark.conf.get("spark.sql.nonexistent", "<undefined>"))
+  }
 }
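
To run just this suite locally, something along these lines should work with Spark's standard sbt build (the module name and wildcard form follow the usual developer-tools conventions and are not part of this commit):

```
build/sbt "sql/testOnly *SQLConfSuite"
```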