diff --git a/bin/spark-shell b/bin/spark-shell
index 05a46ee0caf55a0b2a3b2be8d31c810700fb2560..2bff06cf7005168894b5788bea7f169cfe13bdbf 100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@ -21,8 +21,6 @@
 # Shell script for starting the Spark Shell REPL
 # Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
 # if those two env vars are set in spark-env.sh but MASTER is not.
-# Options:
-#    -c <cores>    Set the number of cores for REPL to use
 
 cygwin=false
 case "`uname`" in
@@ -32,14 +30,52 @@ esac
 # Enter posix mode for bash
 set -o posix
 
+CORE_PATTERN="^[0-9]+$"
+MEM_PATTERN="^[0-9]+[mMgG]$"
+
 FWDIR="$(cd `dirname $0`/..; pwd)"
 
+if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
+	echo "Usage: spark-shell [OPTIONS]"
+	echo "OPTIONS:"
+	echo "-c --cores num, the maximum number of cores to be used by the spark shell"
+	echo "-em --execmem num[m|g], the memory used by each executor of spark shell"
+	echo "-dm --drivermem num[m|g], the memory used by the spark shell and driver"
+	echo "-h --help, print this help information" 
+	exit
+fi
+
+SPARK_SHELL_OPTS=""
+
 for o in "$@"; do
   if [ "$1" = "-c" -o "$1" = "--cores" ]; then
     shift
-    if [ -n "$1" ]; then
-      OPTIONS="-Dspark.cores.max=$1"
+    if [[ "$1" =~ $CORE_PATTERN ]]; then
+      SPARK_SHELL_OPTS="$SPARK_SHELL_OPTS -Dspark.cores.max=$1"
       shift
+    else
+      echo "ERROR: wrong format for -c/--cores"
+      exit 1
+    fi
+  fi
+  if [ "$1" = "-em" -o "$1" = "--execmem" ]; then
+    shift
+    if [[ $1 =~ $MEM_PATTERN ]]; then
+      SPARK_SHELL_OPTS="$SPARK_SHELL_OPTS -Dspark.executor.memory=$1"
+      shift
+    else
+      echo "ERROR: wrong format for --execmem/-em"
+      exit 1
+    fi
+  fi
+  if [ "$1" = "-dm" -o "$1" = "--drivermem" ]; then
+    shift
+    if [[ $1 =~ $MEM_PATTERN ]]; then
+      export SPARK_MEM=$1
+      shift
+    else
+      echo "ERROR: wrong format for --drivermem/-dm"
+      exit 1
     fi
   fi
 done
@@ -95,10 +131,10 @@ if $cygwin; then
     # "Backspace sends ^H" setting in "Keys" section of the Mintty options
     # (see https://github.com/sbt/sbt/issues/562).
     stty -icanon min 1 -echo > /dev/null 2>&1
-    $FWDIR/bin/spark-class -Djline.terminal=unix $OPTIONS org.apache.spark.repl.Main "$@"
+    $FWDIR/bin/spark-class -Djline.terminal=unix $SPARK_SHELL_OPTS org.apache.spark.repl.Main "$@"
     stty icanon echo > /dev/null 2>&1
 else
-    $FWDIR/bin/spark-class $OPTIONS org.apache.spark.repl.Main "$@"
+    $FWDIR/bin/spark-class $SPARK_SHELL_OPTS org.apache.spark.repl.Main "$@"
 fi
 
 # record the exit status lest it be overwritten:
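A quick illustration of the new flags, as a hedged sketch only: the core count and memory sizes below are arbitrary example values, and the commands assume the script is invoked from the Spark home directory with spark-env.sh already configured.

    # long-form options: cap the shell at 4 cores, 2g per executor, 1g for the shell/driver
    ./bin/spark-shell --cores 4 --execmem 2g --drivermem 1g

    # short forms; a value that fails CORE_PATTERN or MEM_PATTERN prints an error and exits with status 1
    ./bin/spark-shell -c 4 -em 512m -dm 512m
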
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index bc25b50a4efc7e091f35120cccd5a1e79a95eae3..013cea07d48fdc8d69848c358159a4af90da4f7f 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -954,7 +954,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       conf.setSparkHome(System.getenv("SPARK_HOME"))
     }
     sparkContext = new SparkContext(conf)
-    echo("Created spark context..")
+    logInfo("Created spark context..")
     sparkContext
   }