#!/usr/bin/env bash
    
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
    
cygwin=false
case "`uname`" in
    CYGWIN*) cygwin=true;;
esac
    
SCALA_VERSION=2.10
    
# Figure out where the Scala framework is installed
FWDIR="$(cd `dirname $0`/..; pwd)"
    
# Export this as SPARK_HOME
export SPARK_HOME="$FWDIR"
    
. "$FWDIR"/bin/load-spark-env.sh
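# (load-spark-env.sh is expected to source conf/spark-env.sh if present --
# an assumption based on the standard Spark layout -- so variables such as
# SPARK_JAVA_OPTS and SPARK_PRINT_LAUNCH_COMMAND used below can be set there.)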
    
if [ -z "$1" ]; then
  echo "Usage: run-example <example-class> [<args>]" >&2
  exit 1
fi
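# Example invocation (class name and master argument are illustrative):
#   ./bin/run-example org.apache.spark.examples.SparkPi local[2]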
    
# Figure out the JAR file that our examples were packaged into. This includes a bit of a hack
# to avoid the -sources and -doc packages that are built by publish-local.
EXAMPLES_DIR="$FWDIR"/examples
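# (The assembly jar matched below is named like
# spark-examples-<version>-hadoop<version>.jar; the exact name depends on the
# build and is shown here only as an illustration.)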
    
if [ -f "$FWDIR/RELEASE" ]; then
  export SPARK_EXAMPLES_JAR=`ls "$FWDIR"/lib/spark-examples-*hadoop*.jar`
elif [ -e "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar ]; then
  export SPARK_EXAMPLES_JAR=`ls "$EXAMPLES_DIR"/target/scala-$SCALA_VERSION/spark-examples-*hadoop*.jar`
fi

if [[ -z $SPARK_EXAMPLES_JAR ]]; then
  echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" >&2
  echo "You need to build Spark with sbt/sbt assembly before running this program" >&2
  exit 1
fi
    
# Since the examples JAR ideally shouldn't include spark-core (that dependency should be
# "provided"), also add our standard Spark classpath, built using compute-classpath.sh.
CLASSPATH=`"$FWDIR"/bin/compute-classpath.sh`
CLASSPATH="$SPARK_EXAMPLES_JAR:$CLASSPATH"
    
if $cygwin; then
    CLASSPATH=`cygpath -wp "$CLASSPATH"`
    export SPARK_EXAMPLES_JAR=`cygpath -w "$SPARK_EXAMPLES_JAR"`
fi
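# (cygpath -w converts a single POSIX path to Windows form, e.g.
# /home/user/spark -> C:\cygwin\home\user\spark, and -wp converts a whole
# colon-separated path list; the paths shown are illustrative.)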
    
# Find java binary
if [ -n "${JAVA_HOME}" ]; then
  RUNNER="${JAVA_HOME}/bin/java"
else
  if [ `command -v java` ]; then
    RUNNER="java"
  else
    echo "JAVA_HOME is not set" >&2
    exit 1
  fi
fi
    
# Set JAVA_OPTS to be able to load native libraries and to set heap size
JAVA_OPTS="$SPARK_JAVA_OPTS"
# Load extra JAVA_OPTS from conf/java-opts, if it exists
if [ -e "$FWDIR/conf/java-opts" ] ; then
  JAVA_OPTS="$JAVA_OPTS `cat "$FWDIR"/conf/java-opts`"
fi
export JAVA_OPTS
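# A conf/java-opts file holds extra JVM flags on a single line, for example
# (values and paths are illustrative):
#   -Xmx2g -Djava.library.path=/opt/spark-native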
    
if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then
  echo -n "Spark Command: "
  echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"
  echo "========================================"
  echo
fi
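# Example (invocation is illustrative):
#   SPARK_PRINT_LAUNCH_COMMAND=1 ./bin/run-example org.apache.spark.examples.SparkPi local[2]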
    
exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@"