Skip to content
Snippets Groups Projects
Commit 8e875d2a authored by WangTao's avatar WangTao Committed by Patrick Wendell
Browse files

[SPARK-3599]Avoid loading properties file frequently

https://issues.apache.org/jira/browse/SPARK-3599

Author: WangTao <barneystinson@aliyun.com>
Author: WangTaoTheTonic <barneystinson@aliyun.com>

Closes #2454 from WangTaoTheTonic/avoidLoadingFrequently and squashes the following commits:

3681182 [WangTao] do not use clone
7dca036 [WangTao] use lazy val instead
2a79f26 [WangTaoTheTonic] Avoid loading properties file frequently
parent 293ce851
No related branches found
No related tags found
No related merge requests found
...@@ -280,7 +280,7 @@ object SparkSubmit { ...@@ -280,7 +280,7 @@ object SparkSubmit {
} }
// Read from default spark properties, if any // Read from default spark properties, if any
for ((k, v) <- args.getDefaultSparkProperties) { for ((k, v) <- args.defaultSparkProperties) {
sysProps.getOrElseUpdate(k, v) sysProps.getOrElseUpdate(k, v)
} }
......
...@@ -57,12 +57,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) { ...@@ -57,12 +57,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
var pyFiles: String = null var pyFiles: String = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]() val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
parseOpts(args.toList) /** Default properties present in the currently defined defaults file. */
mergeSparkProperties() lazy val defaultSparkProperties: HashMap[String, String] = {
checkRequiredArguments()
/** Return default present in the currently defined defaults file. */
def getDefaultSparkProperties = {
val defaultProperties = new HashMap[String, String]() val defaultProperties = new HashMap[String, String]()
if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile") if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
Option(propertiesFile).foreach { filename => Option(propertiesFile).foreach { filename =>
...@@ -79,6 +75,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) { ...@@ -79,6 +75,10 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
defaultProperties defaultProperties
} }
parseOpts(args.toList)
mergeSparkProperties()
checkRequiredArguments()
/** /**
* Fill in any undefined values based on the default properties file or options passed in through * Fill in any undefined values based on the default properties file or options passed in through
* the '--conf' flag. * the '--conf' flag.
...@@ -107,7 +107,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) { ...@@ -107,7 +107,8 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
} }
} }
val properties = getDefaultSparkProperties val properties = HashMap[String, String]()
properties.putAll(defaultSparkProperties)
properties.putAll(sparkProperties) properties.putAll(sparkProperties)
// Use properties file as fallback for values which have a direct analog to // Use properties file as fallback for values which have a direct analog to
...@@ -213,7 +214,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) { ...@@ -213,7 +214,7 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) {
| verbose $verbose | verbose $verbose
| |
|Default properties from $propertiesFile: |Default properties from $propertiesFile:
|${getDefaultSparkProperties.mkString(" ", "\n ", "\n")} |${defaultSparkProperties.mkString(" ", "\n ", "\n")}
""".stripMargin """.stripMargin
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment