Skip to content
Snippets Groups Projects
Commit 430cd781 authored by Kenichi Maehashi's avatar Kenichi Maehashi Committed by Marcelo Vanzin
Browse files

[SPARK-9180] fix spark-shell to accept --name option

This patch fixes [SPARK-9180](https://issues.apache.org/jira/browse/SPARK-9180).
Users can now set the app name of spark-shell using `spark-shell --name "whatever"`.

Author: Kenichi Maehashi <webmaster@kenichimaehashi.com>

Closes #7512 from kmaehashi/fix-spark-shell-app-name and squashes the following commits:

e24991a [Kenichi Maehashi] use setIfMissing instead of setAppName
18aa4ad [Kenichi Maehashi] fix spark-shell to accept --name option
parent 798dff7b
No related branches found
No related tags found
No related merge requests found
...@@ -47,11 +47,11 @@ function main() { ...@@ -47,11 +47,11 @@ function main() {
# (see https://github.com/sbt/sbt/issues/562). # (see https://github.com/sbt/sbt/issues/562).
stty -icanon min 1 -echo > /dev/null 2>&1 stty -icanon min 1 -echo > /dev/null 2>&1
export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix" export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
"$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main "$@" "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
stty icanon echo > /dev/null 2>&1 stty icanon echo > /dev/null 2>&1
else else
export SPARK_SUBMIT_OPTS export SPARK_SUBMIT_OPTS
"$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main "$@" "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
fi fi
} }
......
...@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" ( ...@@ -32,4 +32,4 @@ if "x%SPARK_SUBMIT_OPTS%"=="x" (
set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true" set SPARK_SUBMIT_OPTS="%SPARK_SUBMIT_OPTS% -Dscala.usejavacp=true"
:run_shell :run_shell
%SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main %* %SPARK_HOME%\bin\spark-submit2.cmd --class org.apache.spark.repl.Main --name "Spark shell" %*
...@@ -1008,9 +1008,9 @@ class SparkILoop( ...@@ -1008,9 +1008,9 @@ class SparkILoop(
val jars = SparkILoop.getAddedJars val jars = SparkILoop.getAddedJars
val conf = new SparkConf() val conf = new SparkConf()
.setMaster(getMaster()) .setMaster(getMaster())
.setAppName("Spark shell")
.setJars(jars) .setJars(jars)
.set("spark.repl.class.uri", intp.classServerUri) .set("spark.repl.class.uri", intp.classServerUri)
.setIfMissing("spark.app.name", "Spark shell")
if (execUri != null) { if (execUri != null) {
conf.set("spark.executor.uri", execUri) conf.set("spark.executor.uri", execUri)
} }
......
...@@ -65,9 +65,9 @@ object Main extends Logging { ...@@ -65,9 +65,9 @@ object Main extends Logging {
val jars = getAddedJars val jars = getAddedJars
val conf = new SparkConf() val conf = new SparkConf()
.setMaster(getMaster) .setMaster(getMaster)
.setAppName("Spark shell")
.setJars(jars) .setJars(jars)
.set("spark.repl.class.uri", classServer.uri) .set("spark.repl.class.uri", classServer.uri)
.setIfMissing("spark.app.name", "Spark shell")
logInfo("Spark class server started at " + classServer.uri) logInfo("Spark class server started at " + classServer.uri)
if (execUri != null) { if (execUri != null) {
conf.set("spark.executor.uri", execUri) conf.set("spark.executor.uri", execUri)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment