diff --git a/spark-shell b/spark-shell
index ea67a3e6b8c424aebf58fa1b77a47dbd3c9afe86..a8e72143fbc08f3dc04712c88d63b93902ed8c35 100755
--- a/spark-shell
+++ b/spark-shell
@@ -1,24 +1,14 @@
 #!/bin/bash --posix
 #
 # Shell script for starting the Spark Shell REPL
+# Note that this script will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
+# if those two env vars are set in spark-env.sh but MASTER is not.
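+# For example (hypothetical values): with SPARK_MASTER_IP=host1 and
+# SPARK_MASTER_PORT=7077 in spark-env.sh, MASTER becomes spark://host1:7077.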
 # Options:
-#    -m            Set MASTER to spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
 #    -c <cores>    Set the number of cores for REPL to use
 #
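+# Usage example (hypothetical core count; any positive integer works):
+#   ./spark-shell -c 4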
 FWDIR="`dirname $0`"
 
 for o in "$@"; do
-  if [ "$1" = "-m" -o "$1" = "--master" ]; then
-    shift
-    if [ -e "$FWDIR/conf/spark-env.sh" ]; then
-      . "$FWDIR/conf/spark-env.sh"
-    fi
-    if [ -z "$MASTER" ]; then
-      MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
-    fi
-    export MASTER
-  fi
-
   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
     shift
     if [ -n "$1" ]; then
@@ -28,6 +18,17 @@ for o in "$@"; do
   fi
 done
 
+# If MASTER is not already set, derive it from spark-env.sh when possible
+if [ -z "$MASTER" ]; then
+  if [ -e "$FWDIR/conf/spark-env.sh" ]; then
+    . "$FWDIR/conf/spark-env.sh"
+  fi
+  if [[ "x" != "x$SPARK_MASTER_IP" && "y" != "y$SPARK_MASTER_PORT" ]]; then
+    MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
+    export MASTER
+  fi
+fi
+
 # Copy restore-TTY-on-exit functions from the Scala script so spark-shell exits properly even in
 # a binary distribution of Spark where Scala is not installed
 exit_status=127
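 
 # A minimal sketch of the restore-TTY pattern referred to above (assumed
 # names, not the verbatim functions): capture the terminal state with
 #   saved_stty="$(stty -g 2>/dev/null)"
 # and trap a handler that restores it so echo comes back even on interrupt:
 #   trap 'stty "$saved_stty"; exit $exit_status' INT TERM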