From fcfe4f920484b64b01e4e22219d59c78ffd17054 Mon Sep 17 00:00:00 2001
From: shane-huang <shengsheng.huang@intel.com>
Date: Mon, 23 Sep 2013 12:42:34 +0800
Subject: [PATCH] add admin scripts to sbin

Signed-off-by: shane-huang <shengsheng.huang@intel.com>
---
 docs/spark-standalone.md            | 12 ++++++------
 {bin => sbin}/compute-classpath.cmd |  0
 {bin => sbin}/compute-classpath.sh  |  0
 {bin => sbin}/slaves.sh             |  6 +++---
 {bin => sbin}/spark-config.sh       |  0
 {bin => sbin}/spark-daemon.sh       |  6 +++---
 {bin => sbin}/spark-daemons.sh      |  8 ++++----
 {bin => sbin}/start-all.sh          | 10 +++++-----
 {bin => sbin}/start-master.sh       |  8 ++++----
 {bin => sbin}/start-slave.sh        |  6 +++---
 {bin => sbin}/start-slaves.sh       | 10 +++++-----
 {bin => sbin}/stop-all.sh           | 10 +++++-----
 {bin => sbin}/stop-master.sh        |  8 ++++----
 {bin => sbin}/stop-slaves.sh        | 10 +++++-----
 14 files changed, 47 insertions(+), 47 deletions(-)
 rename {bin => sbin}/compute-classpath.cmd (100%)
 rename {bin => sbin}/compute-classpath.sh (100%)
 rename {bin => sbin}/slaves.sh (96%)
 rename {bin => sbin}/spark-config.sh (100%)
 rename {bin => sbin}/spark-daemon.sh (98%)
 rename {bin => sbin}/spark-daemons.sh (88%)
 rename {bin => sbin}/start-all.sh (89%)
 rename {bin => sbin}/start-master.sh (88%)
 rename {bin => sbin}/start-slave.sh (92%)
 rename {bin => sbin}/start-slaves.sh (78%)
 rename {bin => sbin}/stop-all.sh (89%)
 rename {bin => sbin}/stop-master.sh (86%)
 rename {bin => sbin}/stop-slaves.sh (82%)

diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index 9d4ad1ec8d..b3f9160673 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -67,12 +67,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create

 Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`:

-- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
-- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
-- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
-- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
-- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
-- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
+- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
+- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
+- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
+- `sbin/stop-master.sh` - Stops the master that was started via the `sbin/start-master.sh` script.
+- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `sbin/start-slaves.sh`.
+- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.

 Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.
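The hunk above is the user-facing contract for the move. As a minimal usage sketch (assuming SPARK_HOME points at the installation root and conf/slaves lists one worker host per line; neither is set up by this patch), a launch/stop cycle on the master machine would look like:

    cd "$SPARK_HOME"
    sbin/start-all.sh   # start the master here, plus one worker per host in conf/slaves
    # ... run applications against the master, e.g. spark://<master-host>:7077 ...
    sbin/stop-all.sh    # stop the workers first, then the master
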
diff --git a/bin/compute-classpath.cmd b/sbin/compute-classpath.cmd
similarity index 100%
rename from bin/compute-classpath.cmd
rename to sbin/compute-classpath.cmd
diff --git a/bin/compute-classpath.sh b/sbin/compute-classpath.sh
similarity index 100%
rename from bin/compute-classpath.sh
rename to sbin/compute-classpath.sh
diff --git a/bin/slaves.sh b/sbin/slaves.sh
similarity index 96%
rename from bin/slaves.sh
rename to sbin/slaves.sh
index 752565b759..68408bcad8 100755
--- a/bin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
diff --git a/bin/spark-config.sh b/sbin/spark-config.sh
similarity index 100%
rename from bin/spark-config.sh
rename to sbin/spark-config.sh
diff --git a/bin/spark-daemon.sh b/sbin/spark-daemon.sh
similarity index 98%
rename from bin/spark-daemon.sh
rename to sbin/spark-daemon.sh
index 6baee0c33b..ae82349cc6 100755
--- a/bin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # get arguments
 startStop=$1
diff --git a/bin/spark-daemons.sh b/sbin/spark-daemons.sh
similarity index 88%
rename from bin/spark-daemons.sh
rename to sbin/spark-daemons.sh
index 354eb905a1..d91254b690 100755
--- a/bin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -27,9 +27,9 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

-exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
+exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
diff --git a/bin/start-all.sh b/sbin/start-all.sh
similarity index 89%
rename from bin/start-all.sh
rename to sbin/start-all.sh
index 0182f1ab24..2daf49db35 100755
--- a/bin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,14 +21,14 @@
 # Starts the master on this node.
 # Starts a worker on each node specified in conf/slaves

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Load the Spark configuration
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # Start Master
-"$bin"/start-master.sh
+"$sbin"/start-master.sh

 # Start Workers
-"$bin"/start-slaves.sh
+"$sbin"/start-slaves.sh
diff --git a/bin/start-master.sh b/sbin/start-master.sh
similarity index 88%
rename from bin/start-master.sh
rename to sbin/start-master.sh
index 648c7ae75f..3dcf7cc348 100755
--- a/bin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,10 +19,10 @@

 # Starts the master on the machine this script is executed on.

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
"${SPARK_CONF_DIR}/spark-env.sh" @@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then fi fi -"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT +"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT diff --git a/bin/start-slave.sh b/sbin/start-slave.sh similarity index 92% rename from bin/start-slave.sh rename to sbin/start-slave.sh index 4eefa20944..524be38c62 100755 --- a/bin/start-slave.sh +++ b/sbin/start-slave.sh @@ -20,8 +20,8 @@ # Usage: start-slave.sh <worker#> <master-spark-URL> # where <master-spark-URL> is like "spark://localhost:7077" -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` # Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI if [ "$SPARK_PUBLIC_DNS" = "" ]; then @@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then fi fi -"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@" +"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@" diff --git a/bin/start-slaves.sh b/sbin/start-slaves.sh similarity index 78% rename from bin/start-slaves.sh rename to sbin/start-slaves.sh index 00dc4888b2..fd5cdeb1e6 100755 --- a/bin/start-slaves.sh +++ b/sbin/start-slaves.sh @@ -17,10 +17,10 @@ # limitations under the License. # -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then . "${SPARK_CONF_DIR}/spark-env.sh" @@ -37,12 +37,12 @@ fi # Launch the slaves if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT + exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT else if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then SPARK_WORKER_WEBUI_PORT=8081 fi for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) + "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i )) done fi diff --git a/bin/stop-all.sh b/sbin/stop-all.sh similarity index 89% rename from bin/stop-all.sh rename to sbin/stop-all.sh index b6c83a7ba4..60b358d374 100755 --- a/bin/stop-all.sh +++ b/sbin/stop-all.sh @@ -21,12 +21,12 @@ # Run this on the master nde -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` # Load the Spark configuration -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" # Stop the slaves, then the master -"$bin"/stop-slaves.sh -"$bin"/stop-master.sh +"$sbin"/stop-slaves.sh +"$sbin"/stop-master.sh diff --git a/bin/stop-master.sh b/sbin/stop-master.sh similarity index 86% rename from bin/stop-master.sh rename to sbin/stop-master.sh index 310e33bedc..2adabd4265 100755 --- a/bin/stop-master.sh +++ b/sbin/stop-master.sh @@ -19,9 +19,9 @@ # Starts the master on the machine this script is executed on. -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` -. "$bin/spark-config.sh" +. 
"$sbin/spark-config.sh" -"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 +"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1 diff --git a/bin/stop-slaves.sh b/sbin/stop-slaves.sh similarity index 82% rename from bin/stop-slaves.sh rename to sbin/stop-slaves.sh index 03e416a132..63802e6df5 100755 --- a/bin/stop-slaves.sh +++ b/sbin/stop-slaves.sh @@ -19,19 +19,19 @@ # Starts the master on the machine this script is executed on. -bin=`dirname "$0"` -bin=`cd "$bin"; pwd` +sbin=`dirname "$0"` +sbin=`cd "$sbin"; pwd` -. "$bin/spark-config.sh" +. "$sbin/spark-config.sh" if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then . "${SPARK_CONF_DIR}/spark-env.sh" fi if [ "$SPARK_WORKER_INSTANCES" = "" ]; then - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1 else for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do - "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) + "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 )) done fi -- GitLab