diff --git a/bin/slaves.sh b/bin/slaves.sh
index c8fb5ca473699333c671422cdd6428fe5c4b855f..752565b759f77384b169f917bf212a474abd59ae 100755
--- a/bin/slaves.sh
+++ b/bin/slaves.sh
@@ -42,7 +42,7 @@ bin=`cd "$bin"; pwd`
 . "$bin/spark-config.sh"
 
 # If the slaves file is specified in the command line,
-# then it takes precedence over the definition in 
+# then it takes precedence over the definition in
 # spark-env.sh. Save it here.
 HOSTLIST=$SPARK_SLAVES
 
@@ -58,8 +58,6 @@ if [ "$HOSTLIST" = "" ]; then
   fi
 fi
 
-echo $"${@// /\\ }"
-
 # By default disable strict host key checking
 if [ "$SPARK_SSH_OPTS" = "" ]; then
   SPARK_SSH_OPTS="-o StrictHostKeyChecking=no"
diff --git a/bin/spark-daemon.sh b/bin/spark-daemon.sh
index f01ff84d95a04de387514a02a386f1fe3c0e5c45..5bfe967fbfaeb76729ae3a30b81287cfb6021b55 100755
--- a/bin/spark-daemon.sh
+++ b/bin/spark-daemon.sh
@@ -75,6 +75,9 @@ if [ "$SPARK_IDENT_STRING" = "" ]; then
   export SPARK_IDENT_STRING="$USER"
 fi
 
+
+export SPARK_PRINT_LAUNCH_COMMAND="1"
+
 # get log directory
 if [ "$SPARK_LOG_DIR" = "" ]; then
   export SPARK_LOG_DIR="$SPARK_HOME/logs"
@@ -122,12 +125,19 @@ case $startStop in
       rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $SPARK_MASTER/ "$SPARK_HOME"
     fi
 
-    spark_rotate_log $log
+    spark_rotate_log "$log"
     echo starting $command, logging to $log
     cd "$SPARK_PREFIX"
     nohup nice -n $SPARK_NICENESS "$SPARK_PREFIX"/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
-    echo $! > $pid
-    sleep 1; head "$log"
+    newpid=$!
+    echo $newpid > $pid
+    sleep 2
+    # Check if the process has died; in that case we'll tail the log so the user can see
+    if ! kill -0 $newpid >/dev/null 2>&1; then
+      echo "failed to launch $command:"
+      tail -2 "$log" | sed 's/^/ /'
+      echo "full log in $log"
+    fi
     ;;
 
   (stop)
diff --git a/bin/start-slaves.sh b/bin/start-slaves.sh
index dad7c3df765c6a09690038a4eddbc43ee65c0171..00dc4888b2e95220b2ea8ec4bcc32810a7235bfc 100755
--- a/bin/start-slaves.sh
+++ b/bin/start-slaves.sh
@@ -35,8 +35,6 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
   SPARK_MASTER_IP=`hostname`
 fi
 
-echo "Master IP: $SPARK_MASTER_IP"
-
 # Launch the slaves
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
   exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
diff --git a/bin/stop-all.sh b/bin/stop-all.sh
index a043ac0095b48873ac3fb1a0613c0574b88fcf02..b6c83a7ba4c7df08aafa58f7e7a772e0d98452c9 100755
--- a/bin/stop-all.sh
+++ b/bin/stop-all.sh
@@ -20,6 +20,7 @@
 # Start all spark daemons.
 # Run this on the master nde
 
+
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
diff --git a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
index 717619f80d14dc029e0d3d61459d173b90d0f7e5..0a75ad8cf4a0ab05051dc99a356631b17c4e8bff 100644
--- a/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
+++ b/core/src/main/scala/spark/deploy/worker/ui/WorkerWebUI.scala
@@ -143,7 +143,6 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
       <html>
        <body>
          {linkToMaster}
-          <hr />
          <div>
            <div style="float:left;width:40%">{backButton}</div>
            <div style="float:left;">{range}</div>
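
Note: below is a minimal standalone sketch (not part of the patch) of the launch check that spark-daemon.sh now performs, for trying the pattern outside the script. The command (sleep 0.1) and the log path are placeholders chosen for illustration; `kill -0` probes whether a PID still exists without delivering a signal.

    #!/usr/bin/env bash
    # Start a command the same way spark-daemon.sh does: detached, logging to a file.
    log=/tmp/launch-check-demo.log
    nohup sleep 0.1 >> "$log" 2>&1 < /dev/null &   # stand-in for a daemon that dies right after starting
    newpid=$!

    # Wait briefly, then probe the PID; kill -0 sends no signal but fails once
    # the process is gone, which is how a failed launch is detected.
    sleep 2
    if ! kill -0 $newpid >/dev/null 2>&1; then
      echo "process $newpid exited early; last lines of $log:"
      tail -2 "$log" | sed 's/^/ /'
    fi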