    #!/usr/bin/env bash
    
    #
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #
    
    
    export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
    
    source "$SPARK_HOME"/bin/load-spark-env.sh
    
    export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
    
    # In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
    # executable, while the worker would still be launched using PYSPARK_PYTHON.
    #
    # In Spark 1.2, we removed the documentation of the IPYTHON and IPYTHON_OPTS variables and added
    # PYSPARK_DRIVER_PYTHON and PYSPARK_DRIVER_PYTHON_OPTS to allow IPython to be used for the driver.
    # Now, users can simply set PYSPARK_DRIVER_PYTHON=ipython to use IPython and set
    # PYSPARK_DRIVER_PYTHON_OPTS to pass options when starting the Python driver
    # (e.g. PYSPARK_DRIVER_PYTHON_OPTS='notebook').  This supports full customization of the IPython
    # and executor Python executables.
    #
    # For backwards-compatibility, we retain the old IPYTHON and IPYTHON_OPTS variables.
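    #
    # Example (illustrative, not executed here): start the driver under the IPython
    # notebook while the executors use the interpreter chosen below:
    #
    #   PYSPARK_DRIVER_PYTHON=ipython PYSPARK_DRIVER_PYTHON_OPTS="notebook" ./bin/pyspark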
    
    # Determine the Python executable to use if PYSPARK_PYTHON or PYSPARK_DRIVER_PYTHON isn't set:
    if hash python2.7 2>/dev/null; then
      # Attempt to use Python 2.7, if installed:
      DEFAULT_PYTHON="python2.7"
    else
      DEFAULT_PYTHON="python"
    fi
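
    # Example (illustrative): setting PYSPARK_PYTHON overrides the detection above
    # and, absent other settings, is also inherited by the driver (see below):
    #
    #   PYSPARK_PYTHON=python2.7 ./bin/pyspark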
    
    # Determine the Python executable to use for the driver:
    if [[ -n "$IPYTHON_OPTS" || "$IPYTHON" == "1" ]]; then
      # If IPython options are specified, assume user wants to run IPython
      # (for backwards-compatibility)
      PYSPARK_DRIVER_PYTHON_OPTS="$PYSPARK_DRIVER_PYTHON_OPTS $IPYTHON_OPTS"
      PYSPARK_DRIVER_PYTHON="ipython"
    elif [[ -z "$PYSPARK_DRIVER_PYTHON" ]]; then
      PYSPARK_DRIVER_PYTHON="${PYSPARK_PYTHON:-"$DEFAULT_PYTHON"}"
    fi
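
    # Backwards-compatible form (illustrative): the legacy invocation
    #
    #   IPYTHON=1 ./bin/pyspark
    #
    # takes the first branch above and is equivalent to setting
    # PYSPARK_DRIVER_PYTHON=ipython explicitly.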
    
    # Determine the Python executable to use for the executors:
    if [[ -z "$PYSPARK_PYTHON" ]]; then
      if [[ $PYSPARK_DRIVER_PYTHON == *ipython* && $DEFAULT_PYTHON != "python2.7" ]]; then
        echo "IPython requires Python 2.7+; please install python2.7 or set PYSPARK_PYTHON" 1>&2
        exit 1
      else
        PYSPARK_PYTHON="$DEFAULT_PYTHON"
      fi
    fi
    export PYSPARK_PYTHON
    
    # Add the PySpark classes to the Python path:
    export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
    export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.8.2.1-src.zip:$PYTHONPATH"
    
    # Load the PySpark shell.py script when ./pyspark is used interactively:
    export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
    export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
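
    # Note: PYTHONSTARTUP is only honored for interactive sessions; shell.py uses it
    # to initialize the SparkContext that the shell exposes as `sc`.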
    
    # For pyspark tests: fix the hash seed so test runs are deterministic, then
    # run the requested test module directly instead of launching the shell.
    if [[ -n "$SPARK_TESTING" ]]; then
      export PYTHONHASHSEED=0
      exec "$PYSPARK_DRIVER_PYTHON" -m "$1"
    fi

    export PYSPARK_DRIVER_PYTHON
    export PYSPARK_DRIVER_PYTHON_OPTS
    
    exec "$SPARK_HOME"/bin/spark-submit pyspark-shell-main --name "PySparkShell" "$@"