Commit 9d9d67c7 authored by Liwei Lin, committed by gatorsmile

[SPARK-19265][SQL][FOLLOW-UP] Configurable `tableRelationCache` maximum size

## What changes were proposed in this pull request?

SPARK-19265 made the table relation cache general; this follow-up makes the maximum size of `tableRelationCache` configurable.

To support such sanity checks on user-supplied values, this patch also adds a `checkValue()` method to `TypedConfigBuilder`.
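
For illustration, here is a minimal sketch of how the new hook behaves, mirroring the new `ConfigEntrySuite` test below. The key `spark.example.maxRetries` is made up for this sketch, and `ConfigBuilder` and the typed `SparkConf.set`/`get` are Spark-internal APIs, so code like this only compiles inside Spark's own packages:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.internal.config.ConfigBuilder

// A hypothetical entry guarded by the new checkValue() hook.
val maxRetries = ConfigBuilder("spark.example.maxRetries")
  .intConf
  .checkValue(_ >= 0, "max retries must be non-negative")
  .createWithDefault(3)

val conf = new SparkConf()
conf.set(maxRetries, -1) // storing the raw value does not validate it...
conf.get(maxRetries)     // ...but reading parses it back through the
                         // validator and throws IllegalArgumentException
```

Because `checkValue()` is layered on `transform`, the validator also runs when a default is materialized: an entry created with a default that fails the predicate throws immediately, as the test's `createEntry(-1)` case shows.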

## How was this patch tested?

new test case: `test("conf entry: checkValue()")`

Author: Liwei Lin <lwlin7@gmail.com>

Closes #16736 from lw-lin/conf.
Parent commit: 50a99126
@@ -90,6 +90,14 @@ private[spark] class TypedConfigBuilder[T](
     new TypedConfigBuilder(parent, s => fn(converter(s)), stringConverter)
   }
 
+  /** Checks if the user-provided value for the config matches the validator. */
+  def checkValue(validator: T => Boolean, errorMsg: String): TypedConfigBuilder[T] = {
+    transform { v =>
+      if (!validator(v)) throw new IllegalArgumentException(errorMsg)
+      v
+    }
+  }
+
   /** Check that user-provided values for the config match a pre-defined set. */
   def checkValues(validValues: Set[T]): TypedConfigBuilder[T] = {
     transform { v =>
...
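
For contrast with the pre-existing `checkValues` (plural) shown at the end of this hunk: `checkValues` restricts a config to a fixed set of allowed values, while the new `checkValue` takes an arbitrary predicate and a custom error message. A sketch with made-up keys:

```scala
// Membership check against a fixed set of allowed strings.
val mode = ConfigBuilder("spark.example.mode") // hypothetical key
  .stringConf
  .checkValues(Set("strict", "lenient"))
  .createWithDefault("strict")

// Arbitrary predicate with a caller-supplied error message.
val poolSize = ConfigBuilder("spark.example.poolSize") // hypothetical key
  .intConf
  .checkValue(_ > 0, "pool size must be positive")
  .createWithDefault(8)
```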
@@ -128,6 +128,28 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conf.get(transformationConf) === "bar")
   }
 
+  test("conf entry: checkValue()") {
+    def createEntry(default: Int): ConfigEntry[Int] =
+      ConfigBuilder(testKey("checkValue"))
+        .intConf
+        .checkValue(value => value >= 0, "value must be non-negative")
+        .createWithDefault(default)
+
+    val conf = new SparkConf()
+
+    val entry = createEntry(10)
+    conf.set(entry, -1)
+    val e1 = intercept[IllegalArgumentException] {
+      conf.get(entry)
+    }
+    assert(e1.getMessage == "value must be non-negative")
+
+    val e2 = intercept[IllegalArgumentException] {
+      createEntry(-1)
+    }
+    assert(e2.getMessage == "value must be non-negative")
+  }
+
   test("conf entry: valid values check") {
     val conf = new SparkConf()
     val enum = ConfigBuilder(testKey("enum"))
...
@@ -34,6 +34,8 @@ trait CatalystConf {
   def optimizerInSetConversionThreshold: Int
   def maxCaseBranchesForCodegen: Int
 
+  def tableRelationCacheSize: Int
+
   def runSQLonFile: Boolean
   def warehousePath: String
@@ -69,6 +71,7 @@ case class SimpleCatalystConf(
     optimizerMaxIterations: Int = 100,
     optimizerInSetConversionThreshold: Int = 10,
     maxCaseBranchesForCodegen: Int = 20,
+    tableRelationCacheSize: Int = 1000,
     runSQLonFile: Boolean = true,
     crossJoinEnabled: Boolean = false,
     cboEnabled: Boolean = false,
...
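
For Catalyst-only tests, the new field can be overridden directly when constructing `SimpleCatalystConf`. A sketch; the leading `caseSensitiveAnalysis` parameter is assumed from the full class signature, which this hunk truncates:

```scala
// Shrink the relation cache for a hypothetical unit test.
val testConf = SimpleCatalystConf(
  caseSensitiveAnalysis = true, // assumed required parameter, not shown above
  tableRelationCacheSize = 16)
```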
@@ -118,11 +118,11 @@ class SessionCatalog(
   }
 
   /**
-   * A cache of qualified table name to table relation plan.
+   * A cache of qualified table names to table relation plans.
    */
   val tableRelationCache: Cache[QualifiedTableName, LogicalPlan] = {
-    // TODO: create a config instead of hardcode 1000 here.
-    CacheBuilder.newBuilder().maximumSize(1000).build[QualifiedTableName, LogicalPlan]()
+    val cacheSize = conf.tableRelationCacheSize
+    CacheBuilder.newBuilder().maximumSize(cacheSize).build[QualifiedTableName, LogicalPlan]()
   }
 
   /**
...
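
Guava's `maximumSize` bound is what makes the new config take effect: once the cache grows past it, older entries are evicted (approximately LRU). A standalone sketch of the same builder pattern, with `String` stand-ins for `QualifiedTableName` and `LogicalPlan`:

```scala
import com.google.common.cache.CacheBuilder

// Same builder pattern as above, bounded at 2 entries for demonstration.
val cache = CacheBuilder.newBuilder()
  .maximumSize(2)
  .build[String, String]()

cache.put("db1.t1", "plan for t1")
cache.put("db1.t2", "plan for t2")
cache.put("db1.t3", "plan for t3") // pushes the cache past its bound
assert(cache.size() <= 2)          // Guava evicts to honor maximumSize
```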
@@ -786,6 +786,9 @@ private[sql] class SQLConf extends Serializable with CatalystConf with Logging {
   def maxCaseBranchesForCodegen: Int = getConf(MAX_CASES_BRANCHES)
 
+  def tableRelationCacheSize: Int =
+    getConf(StaticSQLConf.FILESOURCE_TABLE_RELATION_CACHE_SIZE)
+
   def exchangeReuseEnabled: Boolean = getConf(EXCHANGE_REUSE_ENABLED)
 
   def caseSensitiveAnalysis: Boolean = getConf(SQLConf.CASE_SENSITIVE)
...
@@ -1034,6 +1037,14 @@ object StaticSQLConf {
       .intConf
       .createWithDefault(4000)
 
+  val FILESOURCE_TABLE_RELATION_CACHE_SIZE =
+    buildStaticConf("spark.sql.filesourceTableRelationCacheSize")
+      .internal()
+      .doc("The maximum size of the cache that maps qualified table names to table relation plans.")
+      .intConf
+      .checkValue(cacheSize => cacheSize >= 0, "The maximum size of the cache must not be negative")
+      .createWithDefault(1000)
+
   // When enabling the debug, Spark SQL internal table properties are not filtered out; however,
   // some related DDL commands (e.g., ANALYZE TABLE and CREATE TABLE LIKE) might not work properly.
   val DEBUG_MODE = buildStaticConf("spark.sql.debug")
...
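
Since the new entry is created with `buildStaticConf` and marked `internal()`, it has to be supplied before the `SparkSession` starts, e.g. via `spark-submit --conf` or the session builder; it cannot be changed at runtime. A sketch with an arbitrary example value:

```scala
import org.apache.spark.sql.SparkSession

// Static SQL confs are fixed once the session's shared state is initialized;
// 2000 here is an arbitrary example value.
val spark = SparkSession.builder()
  .master("local[*]") // example-only setting
  .config("spark.sql.filesourceTableRelationCacheSize", "2000")
  .getOrCreate()

// Changing it afterwards, e.g. via spark.conf.set(...), fails with an error
// along the lines of "Cannot modify the value of a static config".
```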
@@ -171,4 +171,20 @@ class SQLConfEntrySuite extends SparkFunSuite {
       buildConf(key).stringConf.createOptional
     }
   }
+
+  test("StaticSQLConf.FILESOURCE_TABLE_RELATION_CACHE_SIZE") {
+    val confEntry = StaticSQLConf.FILESOURCE_TABLE_RELATION_CACHE_SIZE
+    assert(conf.getConf(confEntry) === 1000)
+
+    conf.setConf(confEntry, -1)
+    val e1 = intercept[IllegalArgumentException] {
+      conf.getConf(confEntry)
+    }
+    assert(e1.getMessage === "The maximum size of the cache must not be negative")
+
+    val e2 = intercept[IllegalArgumentException] {
+      conf.setConfString(confEntry.key, "-1")
+    }
+    assert(e2.getMessage === "The maximum size of the cache must not be negative")
+  }
 }