#!/bin/bash --posix
#
# Shell script for starting the Spark Shell REPL
# Options:
#   -m, --master         Set MASTER to spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
#   -c, --cores <cores>  Set the number of cores for the REPL to use
#
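# Example invocation (hypothetical master host/port; adjust for your cluster):
#   ./spark-shell --master spark://localhost:7077 --cores 4
#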
# Find the Spark root directory so we can locate conf/ and the run script
FWDIR="$(cd "$(dirname "$0")"; pwd)"

for o in "$@"; do
  if [ "$1" = "-m" -o "$1" = "--master" ]; then
    shift
    if [ -e "$FWDIR/conf/spark-env.sh" ]; then
      . "$FWDIR/conf/spark-env.sh"
    fi
    if [ -z "$MASTER" ]; then
      MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
    fi
    export MASTER
  fi

  if [ "$1" = "-c" -o "$1" = "--cores" ]; then
    shift
    if [ -n "$1" ]; then
      OPTIONS="-Dspark.cores.max=$1"
      shift
    fi
  fi
done
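
# After option parsing: MASTER (if determined) has been exported for the run
# script, and OPTIONS holds at most a single -Dspark.cores.max=<n> JVM flag.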
# Copy the restore-TTY-on-exit functions from the Scala REPL script so that
# spark-shell exits cleanly even in binary distributions of Spark where Scala
# is not installed.
exit_status=127
saved_stty=""

# Restore stty settings (echo in particular).
function restoreSttySettings() {
  stty $saved_stty
  saved_stty=""
}

function onExit() {
  if [[ "$saved_stty" != "" ]]; then
    restoreSttySettings
  fi
  exit $exit_status
}

# Re-enable echo if we are interrupted before completing.
trap onExit INT

# Save terminal settings; stty -g prints them in a form that can be passed
# back to stty later to restore them.
saved_stty=$(stty -g 2>/dev/null)
# Clear on error so we don't later try to restore bogus settings.
if [[ $? -ne 0 ]]; then
  saved_stty=""
fi
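
# Launch the REPL through Spark's run script, forwarding any remaining
# arguments to spark.repl.Main.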
$FWDIR/run $OPTIONS spark.repl.Main "$@"

# Record the exit status lest it be overwritten,
# then restore the terminal and propagate it.
exit_status=$?
onExit