    #!/usr/bin/env bash

    #
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #
    
    
    #
    # Shell script for starting the Spark Shell REPL.
    #
    # Note that it will set MASTER to spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
    # if those two env vars are set in spark-env.sh but MASTER is not.
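    #
    # For example (illustrative values, not read from any real configuration): with
    # SPARK_MASTER_IP=192.0.2.10 set in spark-env.sh and SPARK_MASTER_PORT unset, this
    # script exports MASTER=spark://192.0.2.10:7077, falling back to the
    # DEFAULT_SPARK_MASTER_PORT defined below.
    #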
    
    
    cygwin=false
    case "`uname`" in
        CYGWIN*) cygwin=true;;
    esac
    
    CORE_PATTERN="^[0-9]+$"
    MEM_PATTERN="^[0-9]+[mMgG]$"
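    # For reference: CORE_PATTERN matches a plain integer such as "4", and MEM_PATTERN
    # matches an integer followed by an m/M/g/G suffix such as "512m" or "2G".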
    
    
    FWDIR="$(cd `dirname $0`/..; pwd)"
    
    if [ "$1" = "--help" ] || [ "$1" = "-h" ]; then
      echo "Usage: spark-shell [OPTIONS]"
      echo "OPTIONS:"
      echo "-c --cores num, the maximum number of cores to be used by the Spark shell"
      echo "-em --execmem num[m|g], the amount of memory used by each executor of the Spark shell"
      echo "-dm --drivermem num[m|g], the amount of memory used by the Spark shell and driver"
      echo "-h --help, print this help information"
      exit
    fi
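
    # Example invocation (illustrative values; assumes the script is installed as
    # bin/spark-shell under the Spark home directory):
    #   ./bin/spark-shell --cores 4 --execmem 2g --drivermem 1g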
    
    
    for o in "$@"; do
      if [ "$1" = "-c" -o "$1" = "--cores" ]; then
        shift
    
        if [[ "$1" =~ $CORE_PATTERN ]]; then
    
          SPARK_REPL_OPTS="$SPARK_REPL_OPTS -Dspark.cores.max=$1"
          shift
    
        else
          echo "ERROR: wrong format for -c/--cores"
          exit 1
        fi
      fi
      if [ "$1" = "-em" -o "$1" = "--execmem" ]; then
        shift
        if [[ $1 =~ $MEM_PATTERN ]]; then
    
          SPARK_REPL_OPTS="$SPARK_REPL_OPTS -Dspark.executor.memory=$1"
    
          shift
        else
          echo "ERROR: wrong format for --execmem/-em"
          exit 1
        fi
      fi
      if [ "$1" = "-dm" -o "$1" = "--drivermem" ]; then
        shift
        if [[ $1 =~ $MEM_PATTERN ]]; then
    
          export SPARK_DRIVER_MEMORY=$1
    
          shift
        else
          echo "ERROR: wrong format for --drivermem/-dm"
          exit 1
        fi
      fi
    done
    
    # Set MASTER from spark-env if possible
    
    DEFAULT_SPARK_MASTER_PORT=7077
    
    if [ -z "$MASTER" ]; then
      . $FWDIR/bin/load-spark-env.sh
    
      if [ "x" != "x$SPARK_MASTER_IP" ]; then
        if [ "y" = "y$SPARK_MASTER_PORT" ]; then
          SPARK_MASTER_PORT=$DEFAULT_SPARK_MASTER_PORT
        fi
        export MASTER="spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}"
      fi
    fi
    
    # Copy restore-TTY-on-exit functions from Scala script so spark-shell exits properly even in
    # binary distribution of Spark where Scala is not installed
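    # (exit_status starts at 127, the shell's "command not found" status, so that an
    #  interrupt handled before the REPL's real status is recorded still propagates a
    #  non-zero exit code.)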
    exit_status=127
    saved_stty=""
    
    # restore stty settings (echo in particular)
    function restoreSttySettings() {
      stty $saved_stty
      saved_stty=""
    }
    
    function onExit() {
      if [[ "$saved_stty" != "" ]]; then
        restoreSttySettings
      fi
      exit $exit_status
    }
    
    # to reenable echo if we are interrupted before completing.
    trap onExit INT
    
    # save terminal settings
    saved_stty=$(stty -g 2>/dev/null)
    # clear on error so we don't later try to restore them
    if [[ $? -ne 0 ]]; then
      saved_stty=""
    fi
    
    
    if $cygwin; then
    
      # Workaround for issue involving JLine and Cygwin
      # (see http://sourceforge.net/p/jline/bugs/40/).
      # If you're using the Mintty terminal emulator in Cygwin, may need to set the
      # "Backspace sends ^H" setting in "Keys" section of the Mintty options
      # (see https://github.com/sbt/sbt/issues/562).
      stty -icanon min 1 -echo > /dev/null 2>&1
      export SPARK_REPL_OPTS="$SPARK_REPL_OPTS -Djline.terminal=unix"
      $FWDIR/bin/spark-class org.apache.spark.repl.Main "$@"
      stty icanon echo > /dev/null 2>&1
    else
      export SPARK_REPL_OPTS
      $FWDIR/bin/spark-class org.apache.spark.repl.Main "$@"
    fi

    # record the exit status lest it be overwritten:
    # then reenable echo and propagate the code.
    exit_status=$?
    onExit