From 1948f46093d2934284daeae06cc2891541c39e68 Mon Sep 17 00:00:00 2001
From: Josh Rosen <joshrosen@eecs.berkeley.edu>
Date: Fri, 14 Dec 2012 01:19:00 +0000
Subject: [PATCH] Use spark-env.sh to configure standalone master.  See
 SPARK-638.

Also fixed a typo in the standalone mode documentation.
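
With this change the master picks up its bind address and ports from
conf/spark-env.sh when they are set there. For example (the host name below
is a placeholder; the port values shown are just the built-in defaults):

    # conf/spark-env.sh -- illustrative values; all three settings are optional
    SPARK_MASTER_IP=master.example.com   # placeholder host; defaults to `hostname`
    SPARK_MASTER_PORT=7077               # defaults to 7077
    SPARK_MASTER_WEBUI_PORT=8080         # defaults to 8080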
---
 bin/start-all.sh         |  4 ++--
 bin/start-master.sh      | 19 +++++++++++++++++--
 bin/start-slave.sh       |  1 -
 docs/spark-standalone.md |  2 +-
 4 files changed, 20 insertions(+), 6 deletions(-)

diff --git a/bin/start-all.sh b/bin/start-all.sh
index 9bd6c50654..b9891ad2f6 100755
--- a/bin/start-all.sh
+++ b/bin/start-all.sh
@@ -11,7 +11,7 @@ bin=`cd "$bin"; pwd`
 . "$bin/spark-config.sh"
 
 # Start Master
-"$bin"/start-master.sh --config $SPARK_CONF_DIR
+"$bin"/start-master.sh
 
 # Start Workers
-"$bin"/start-slaves.sh --config $SPARK_CONF_DIR
\ No newline at end of file
+"$bin"/start-slaves.sh
diff --git a/bin/start-master.sh b/bin/start-master.sh
index ad19d48331..a901b1c260 100755
--- a/bin/start-master.sh
+++ b/bin/start-master.sh
@@ -7,13 +7,28 @@ bin=`cd "$bin"; pwd`
 
 . "$bin/spark-config.sh"
 
+if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
+  . "${SPARK_CONF_DIR}/spark-env.sh"
+fi
+
+if [ "$SPARK_MASTER_PORT" = "" ]; then
+  SPARK_MASTER_PORT=7077
+fi
+
+if [ "$SPARK_MASTER_IP" = "" ]; then
+  SPARK_MASTER_IP=`hostname`
+fi
+
+if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
+  SPARK_MASTER_WEBUI_PORT=8080
+fi
+
 # Set SPARK_PUBLIC_DNS so the master reports the correct webUI address to the slaves
 if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     # If we appear to be running on EC2, use the public address by default:
     if [[ `hostname` == *ec2.internal ]]; then
-        echo "RUNNING ON EC2"
         export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
     fi
 fi
 
-"$bin"/spark-daemon.sh start spark.deploy.master.Master
+"$bin"/spark-daemon.sh start spark.deploy.master.Master --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/bin/start-slave.sh b/bin/start-slave.sh
index 10cce9c17b..45a0cf7a6b 100755
--- a/bin/start-slave.sh
+++ b/bin/start-slave.sh
@@ -7,7 +7,6 @@ bin=`cd "$bin"; pwd`
 if [ "$SPARK_PUBLIC_DNS" = "" ]; then
     # If we appear to be running on EC2, use the public address by default:
     if [[ `hostname` == *ec2.internal ]]; then
-        echo "RUNNING ON EC2"
         export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
     fi
 fi
diff --git a/docs/spark-standalone.md b/docs/spark-standalone.md
index ae630a0371..e0ba7c35cb 100644
--- a/docs/spark-standalone.md
+++ b/docs/spark-standalone.md
@@ -68,7 +68,7 @@ Finally, the following configuration options can be passed to the master and wor
 
 To launch a Spark standalone cluster with the deploy scripts, you need to set up two files, `conf/spark-env.sh` and `conf/slaves`. The `conf/spark-env.sh` file lets you specify global settings for the master and slave instances, such as memory, or port numbers to bind to, while `conf/slaves` is a list of slave nodes. The system requires that all the slave machines have the same configuration files, so *copy these files to each machine*.
 
-In `conf/spark-env.sh`, you can set the following parameters, in addition to the [standard Spark configuration settongs](configuration.html):
+In `conf/spark-env.sh`, you can set the following parameters, in addition to the [standard Spark configuration settings](configuration.html):
 
 <table class="table">
   <tr><th style="width:21%">Environment Variable</th><th>Meaning</th></tr>
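
As a small illustration of the second file described in the section touched
above, conf/slaves is simply one worker host per line (host names below are
placeholders):

    # conf/slaves -- placeholder host names, one worker per line
    slave1.example.com
    slave2.example.com
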
-- 
GitLab