add admin scripts to sbin

Signed-off-by: shane-huang <shengsheng.huang@intel.com>
shane-huang 2013-09-23 12:42:34 +08:00
parent dfbdc9ddb7
commit fcfe4f9204
14 changed files with 47 additions and 47 deletions

docs/spark-standalone.md

@@ -67,12 +67,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create
 Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`:
-- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
-- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
-- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
-- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
-- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
-- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
+- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
+- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
+- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
+- `sbin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
+- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
+- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.
 Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.
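With this change, a standalone cluster is administered from `sbin` instead of `bin`. A minimal session under the updated layout, assuming `SPARK_HOME` is set and `conf/slaves` lists one worker host per line:

# On the master host: start the master, then one worker per entry in conf/slaves
$SPARK_HOME/sbin/start-all.sh

# Later, tear the cluster down in the reverse order
$SPARK_HOME/sbin/stop-all.sh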

bin/slaves.sh → sbin/slaves.sh

@@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
bin/spark-daemon.sh → sbin/spark-daemon.sh

@@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # get arguments
 startStop=$1
bin/spark-daemons.sh → sbin/spark-daemons.sh

@@ -27,9 +27,9 @@ if [ $# -le 1 ]; then
   exit 1
 fi

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

-exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
+exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"

bin/start-all.sh → sbin/start-all.sh

@@ -21,14 +21,14 @@
 # Starts the master on this node.
 # Starts a worker on each node specified in conf/slaves

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Load the Spark configuration
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # Start Master
-"$bin"/start-master.sh
+"$sbin"/start-master.sh

 # Start Workers
-"$bin"/start-slaves.sh
+"$sbin"/start-slaves.sh

bin/start-master.sh → sbin/start-master.sh

@@ -19,10 +19,10 @@
 # Starts the master on the machine this script is executed on.

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
   fi
 fi

-"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT

bin/start-slave.sh → sbin/start-slave.sh

@@ -20,8 +20,8 @@
 # Usage: start-slave.sh <worker#> <master-spark-URL>
 #   where <master-spark-URL> is like "spark://localhost:7077"

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
 if [ "$SPARK_PUBLIC_DNS" = "" ]; then
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
   fi
 fi

-"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
+"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"

bin/start-slaves.sh → sbin/start-slaves.sh

@@ -17,10 +17,10 @@
 # limitations under the License.
 #

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
@@ -37,12 +37,12 @@ fi

 # Launch the slaves
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
+  exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
 else
   if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
     SPARK_WORKER_WEBUI_PORT=8081
   fi
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
+    "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
   done
 fi
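The multi-instance branch gives each worker on a host a distinct number and web UI port. With `SPARK_WORKER_INSTANCES=3` and the default port of 8081, the loop expands to three invocations like these (`spark://master:7077` stands in for the actual master URL):

"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://master:7077 --webui-port 8081
"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 2 spark://master:7077 --webui-port 8082
"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 3 spark://master:7077 --webui-port 8083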

bin/stop-all.sh → sbin/stop-all.sh

@@ -21,12 +21,12 @@
 # Run this on the master nde

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

 # Load the Spark configuration
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 # Stop the slaves, then the master
-"$bin"/stop-slaves.sh
-"$bin"/stop-master.sh
+"$sbin"/stop-slaves.sh
+"$sbin"/stop-master.sh

bin/stop-master.sh → sbin/stop-master.sh

@@ -19,9 +19,9 @@
 # Starts the master on the machine this script is executed on.

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

-"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
+"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1

bin/stop-slaves.sh → sbin/stop-slaves.sh

@@ -19,19 +19,19 @@
 # Starts the master on the machine this script is executed on.

-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`

-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"

 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
 fi

 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
+  "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
 else
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+    "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
   done
 fi