From 8113c55df8f4b5f34140ddba5e58e132e3dc2d23 Mon Sep 17 00:00:00 2001
From: Evan Chan <ev@ooyala.com>
Date: Fri, 28 Jun 2013 13:46:21 -0700
Subject: [PATCH] [Feedback] Get rid of -m, set MASTER from
 SPARK_MASTER_IP/PORT automagically

---
 spark-shell | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/spark-shell b/spark-shell
index ea67a3e6b8..a8e72143fb 100755
--- a/spark-shell
+++ b/spark-shell
@@ -1,24 +1,14 @@
 #!/bin/bash --posix
 #
 # Shell script for starting the Spark Shell REPL
+# Note: if MASTER is unset but SPARK_MASTER_IP and SPARK_MASTER_PORT are set in
+# spark-env.sh, this script sets MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}.
 # Options:
-#    -m            Set MASTER to spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
 #    -c <cores>    Set the number of cores for REPL to use
 #
 FWDIR="`dirname $0`"
 
 for o in "$@"; do
-  if [ "$1" = "-m" -o "$1" = "--master" ]; then
-    shift
-    if [ -e "$FWDIR/conf/spark-env.sh" ]; then
-      . "$FWDIR/conf/spark-env.sh"
-    fi
-    if [ -z "$MASTER" ]; then
-      MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
-    fi
-    export MASTER
-  fi
-
   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
     shift
     if [ -n "$1" ]; then
@@ -28,6 +18,17 @@ for o in "$@"; do
   fi
 done
 
+# Set MASTER from spark-env if possible
+if [ -z "$MASTER" ]; then
+  if [ -e "$FWDIR/conf/spark-env.sh" ]; then
+    . "$FWDIR/conf/spark-env.sh"
+  fi
+  if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
+    MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
+    export MASTER
+  fi
+fi
+
 # Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
 # binary distribution of Spark where Scala is not installed
 exit_status=127
-- 
GitLab