#!/usr/bin/env bash

    #
    # Licensed to the Apache Software Foundation (ASF) under one or more
    # contributor license agreements.  See the NOTICE file distributed with
    # this work for additional information regarding copyright ownership.
    # The ASF licenses this file to You under the Apache License, Version 2.0
    # (the "License"); you may not use this file except in compliance with
    # the License.  You may obtain a copy of the License at
    #
    #    http://www.apache.org/licenses/LICENSE-2.0
    #
    # Unless required by applicable law or agreed to in writing, software
    # distributed under the License is distributed on an "AS IS" BASIS,
    # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    # See the License for the specific language governing permissions and
    # limitations under the License.
    #
    
    
    cygwin=false
    case "`uname`" in
        CYGWIN*) cygwin=true;;
    esac
    
    
    SCALA_VERSION=2.10
    
    
    # Figure out where the Scala framework is installed
    
    FWDIR="$(cd `dirname $0`/..; pwd)"
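# For example (hypothetical path): if this script is invoked as
# /opt/spark/bin/spark-class, FWDIR resolves to /opt/spark.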
    
    
    
    # Export this as SPARK_HOME
    export SPARK_HOME="$FWDIR"
    
    
    . $FWDIR/bin/load-spark-env.sh
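# (load-spark-env.sh sources conf/spark-env.sh, if present, so variables such as
# SPARK_DAEMON_MEMORY or SPARK_WORKER_OPTS set there are visible to the logic below.)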
    
      echo "Usage: spark-class <class> [<args>]" >&2
    
    if [ -n "$SPARK_MEM" ]; then
      echo "Warning: SPARK_MEM is deprecated, please use a more specific config option"
      echo "(e.g., spark.executor.memory or SPARK_DRIVER_MEMORY)."
    
    # Use SPARK_MEM or 512m as the default memory, to be overridden by specific options
    DEFAULT_MEM=${SPARK_MEM:-512m}
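# For example: with SPARK_MEM unset, ${SPARK_MEM:-512m} expands to 512m; with
# SPARK_MEM=1g exported in the environment, DEFAULT_MEM becomes 1g.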
    
    SPARK_DAEMON_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS -Dspark.akka.logLifecycleEvents=true"
    
# Add java opts and memory settings for master, worker, history server, executors, and repl.
case "$1" in
    
      # Master, Worker, and HistoryServer use SPARK_DAEMON_JAVA_OPTS (and specific opts) + SPARK_DAEMON_MEMORY.
    
      'org.apache.spark.deploy.master.Master')
    
        OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_MASTER_OPTS"
        OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
    
        ;;
    
      'org.apache.spark.deploy.worker.Worker')
    
        OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_WORKER_OPTS"
        OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
    
        ;;
    
      'org.apache.spark.deploy.history.HistoryServer')
        OUR_JAVA_OPTS="$SPARK_DAEMON_JAVA_OPTS $SPARK_HISTORY_OPTS"
        OUR_JAVA_MEM=${SPARK_DAEMON_MEMORY:-$DEFAULT_MEM}
        ;;
    
    
      # Executors use SPARK_JAVA_OPTS + SPARK_EXECUTOR_MEMORY.
    
      'org.apache.spark.executor.CoarseGrainedExecutorBackend')
    
        OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
        OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
    
        ;;
    
      'org.apache.spark.executor.MesosExecutorBackend')
    
        OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_EXECUTOR_OPTS"
        OUR_JAVA_MEM=${SPARK_EXECUTOR_MEMORY:-$DEFAULT_MEM}
    
        ;;
    
      # Spark submit uses SPARK_SUBMIT_OPTS and SPARK_JAVA_OPTS
  'org.apache.spark.deploy.SparkSubmit')
        OUR_JAVA_OPTS="$SPARK_JAVA_OPTS $SPARK_SUBMIT_OPTS \
          -Djava.library.path=$SPARK_SUBMIT_LIBRARY_PATH"
    
        OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
        ;;
    
      *)
        OUR_JAVA_OPTS="$SPARK_JAVA_OPTS"
        OUR_JAVA_MEM=${SPARK_DRIVER_MEMORY:-$DEFAULT_MEM}
    
    ;;
esac
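
# Illustrative (hypothetical) invocation: running
#   SPARK_DAEMON_MEMORY=2g ./bin/spark-class org.apache.spark.deploy.master.Master
# matches the Master branch above, so OUR_JAVA_MEM becomes 2g; any class without
# its own branch falls through to *) and uses SPARK_DRIVER_MEMORY, else 512m.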
    
    # Find the java binary
    if [ -n "${JAVA_HOME}" ]; then
      RUNNER="${JAVA_HOME}/bin/java"
    
      if [ `command -v java` ]; then
        RUNNER="java"
    
        echo "JAVA_HOME is not set" >&2
        exit 1
    
    # Set JAVA_OPTS to be able to load native libraries and to set heap size
    
    JAVA_OPTS="$OUR_JAVA_OPTS"
    
    JAVA_OPTS="$JAVA_OPTS -Xms$OUR_JAVA_MEM -Xmx$OUR_JAVA_MEM"
    
    # Load extra JAVA_OPTS from conf/java-opts, if it exists
    
    if [ -e "$FWDIR/conf/java-opts" ] ; then
    
      JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`"
    
    fi
    export JAVA_OPTS
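# A conf/java-opts file holds extra JVM flags on a single line, e.g. (hypothetical):
#   -verbose:gc -XX:+PrintGCDetails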
    
    # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in ExecutorRunner.scala!
    
    
    
    if [ ! -f "$FWDIR/RELEASE" ]; then
      # Exit if the user hasn't compiled Spark
    
      num_jars=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar" | wc -l)
      jars_list=$(ls "$FWDIR"/assembly/target/scala-$SCALA_VERSION/ | grep "spark-assembly.*hadoop.*.jar")
      if [ "$num_jars" -eq "0" ]; then
        echo "Failed to find Spark assembly in $FWDIR/assembly/target/scala-$SCALA_VERSION/" >&2
    
        echo "You need to build Spark with 'sbt/sbt assembly' before running this program." >&2
    
        exit 1
      fi
      if [ "$num_jars" -gt "1" ]; then
        echo "Found multiple Spark assembly jars in $FWDIR/assembly/target/scala-$SCALA_VERSION:" >&2
        echo "$jars_list"
        echo "Please remove all but one jar."
    
    TOOLS_DIR="$FWDIR"/tools
    SPARK_TOOLS_JAR=""
    if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
      # Use the JAR from the SBT build
      export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
    fi
    if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
      # Use the JAR from the Maven build
      # TODO: this also needs to become an assembly!
      export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
    
    # Compute classpath using external script
    CLASSPATH=`$FWDIR/bin/compute-classpath.sh`
    
    if [[ "$1" =~ org.apache.spark.tools.* ]]; then
    
      CLASSPATH="$CLASSPATH:$SPARK_TOOLS_JAR"
    fi
    
    
    if $cygwin; then
    
      CLASSPATH=`cygpath -wp $CLASSPATH`
    
      if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
        export SPARK_TOOLS_JAR=`cygpath -w $SPARK_TOOLS_JAR`
      fi
    
fi
    
    
    if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
    
      echo -n "Spark Command: "
      echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
    
      echo "========================================"
    
    fi
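# For example (illustrative): SPARK_PRINT_LAUNCH_COMMAND=1 ./bin/spark-class <class>
# prints the fully assembled java command line before it is exec'd below.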
    
    
    exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"