Commit fcfe4f92 authored by shane-huang

add admin scripts to sbin

parent dfbdc9dd
@@ -67,12 +67,12 @@ To launch a Spark standalone cluster with the launch scripts, you need to create
Once you've set up this file, you can launch or stop your cluster with the following shell scripts, based on Hadoop's deploy scripts, and available in `SPARK_HOME/bin`:
-- `bin/start-master.sh` - Starts a master instance on the machine the script is executed on.
-- `bin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
-- `bin/start-all.sh` - Starts both a master and a number of slaves as described above.
-- `bin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
-- `bin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
-- `bin/stop-all.sh` - Stops both the master and the slaves as described above.
+- `sbin/start-master.sh` - Starts a master instance on the machine the script is executed on.
+- `sbin/start-slaves.sh` - Starts a slave instance on each machine specified in the `conf/slaves` file.
+- `sbin/start-all.sh` - Starts both a master and a number of slaves as described above.
+- `sbin/stop-master.sh` - Stops the master that was started via the `bin/start-master.sh` script.
+- `sbin/stop-slaves.sh` - Stops the slave instances that were started via `bin/start-slaves.sh`.
+- `sbin/stop-all.sh` - Stops both the master and the slaves as described above.
Note that these scripts must be executed on the machine you want to run the Spark master on, not your local machine.
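For orientation (not part of this commit), a minimal end-to-end workflow with the relocated scripts might look like the following; the hostnames and the two-slave layout are illustrative assumptions:

    # On the master host, list the worker hosts (illustrative hostnames)
    echo "worker1.example.com" >  conf/slaves
    echo "worker2.example.com" >> conf/slaves

    # Launch the master and one worker per host listed above
    sbin/start-all.sh

    # Later, shut the whole cluster down again
    sbin/stop-all.sh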
File moved
File moved
@@ -36,10 +36,10 @@ if [ $# -le 0 ]; then
exit 1
fi
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
# If the slaves file is specified in the command line,
# then it takes precedence over the definition in
File moved
@@ -37,10 +37,10 @@ if [ $# -le 1 ]; then
exit 1
fi
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
# get arguments
startStop=$1
@@ -27,9 +27,9 @@ if [ $# -le 1 ]; then
exit 1
fi
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
-exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/spark-daemon.sh" "$@"
+exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
@@ -21,14 +21,14 @@
# Starts the master on this node.
# Starts a worker on each node specified in conf/slaves
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
# Load the Spark configuration
. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"
# Start Master
"$bin"/start-master.sh
"$sbin"/start-master.sh
# Start Workers
"$bin"/start-slaves.sh
"$sbin"/start-slaves.sh
@@ -19,10 +19,10 @@
# Starts the master on the machine this script is executed on.
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
@@ -49,4 +49,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi
"$bin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
@@ -20,8 +20,8 @@
# Usage: start-slave.sh <worker#> <master-spark-URL>
# where <master-spark-URL> is like "spark://localhost:7077"
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
if [ "$SPARK_PUBLIC_DNS" = "" ]; then
@@ -32,4 +32,4 @@ if [ "$SPARK_PUBLIC_DNS" = "" ]; then
fi
fi
"$bin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
"$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"
@@ -17,10 +17,10 @@
# limitations under the License.
#
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
@@ -37,12 +37,12 @@ fi
# Launch the slaves
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
exec "$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
else
if [ "$SPARK_WORKER_WEBUI_PORT" = "" ]; then
SPARK_WORKER_WEBUI_PORT=8081
fi
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
"$bin/slaves.sh" cd "$SPARK_HOME" \; "$bin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" $(( $i + 1 )) spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT --webui-port $(( $SPARK_WORKER_WEBUI_PORT + $i ))
done
fi
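As the start-slaves.sh hunk shows, one worker per slave host is started unless SPARK_WORKER_INSTANCES is set, in which case each instance gets a consecutive web UI port starting from SPARK_WORKER_WEBUI_PORT (8081 by default). A sketch of the corresponding conf/spark-env.sh entries, with illustrative values:

    # conf/spark-env.sh -- illustrative values only
    SPARK_WORKER_INSTANCES=2        # start two Worker daemons on every slave host
    SPARK_WORKER_WEBUI_PORT=8081    # first instance on 8081, second on 8082, and so on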
@@ -21,12 +21,12 @@
# Run this on the master node
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
# Load the Spark configuration
. "$bin/spark-config.sh"
. "$sbin/spark-config.sh"
# Stop the slaves, then the master
"$bin"/stop-slaves.sh
"$bin"/stop-master.sh
"$sbin"/stop-slaves.sh
"$sbin"/stop-master.sh
@@ -19,9 +19,9 @@
# Stops the master on the machine this script is executed on.
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
"$bin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
@@ -19,19 +19,19 @@
# Stops the workers on the machines specified in conf/slaves.
-bin=`dirname "$0"`
-bin=`cd "$bin"; pwd`
+sbin=`dirname "$0"`
+sbin=`cd "$sbin"; pwd`
-. "$bin/spark-config.sh"
+. "$sbin/spark-config.sh"
if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
. "${SPARK_CONF_DIR}/spark-env.sh"
fi
if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
"$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
else
for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
"$bin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
"$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
done
fi