diff --git a/conf/spark-env.sh.template b/conf/spark-env.sh.template
index e2071e2ade8cd87fefdb60a8aebd57db177219c1..6432a566089bec04681bd307ee144a4383973616 100755
--- a/conf/spark-env.sh.template
+++ b/conf/spark-env.sh.template
@@ -19,3 +19,4 @@
 # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT
 # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node
 # - SPARK_WORKER_DIR, to set the working directory of worker processes
+# - SPARK_PUBLIC_DNS, to set the public DNS name of the master
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index 3dcf7cc3483a785c21fd3266b16426a40cbdbb54..ec3dfdb4197ec206ab734ad13bf91b2e0811ddf3 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -40,13 +40,4 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
   SPARK_MASTER_WEBUI_PORT=8080
 fi
 
-# Set SPARK_PUBLIC_DNS so the master report the correct webUI address to the slaves
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
-    # If we appear to be running on EC2, use the public address by default:
-    # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
-    if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
-        export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
-    fi
-fi
-
 "$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 524be38c629681b8acf1df845feabe020e25ee5c..b563400dc24f348a9a42c8ceb6dea2e667bc1b03 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -23,13 +23,4 @@
 sbin=`dirname "$0"`
 sbin=`cd "$sbin"; pwd`
 
-# Set SPARK_PUBLIC_DNS so slaves can be linked in master web UI
-if [ "$SPARK_PUBLIC_DNS" = "" ]; then
-    # If we appear to be running on EC2, use the public address by default:
-    # NOTE: ec2-metadata is installed on Amazon Linux AMI. Check based on that and hostname
-    if command -v ec2-metadata > /dev/null || [[ `hostname` == *ec2.internal ]]; then
-        export SPARK_PUBLIC_DNS=`wget -q -O - http://instance-data.ec2.internal/latest/meta-data/public-hostname`
-    fi
-fi
-
 "$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker "$@"