diff --git a/bin/beeline b/bin/beeline
index 3fcb6df34339d3295f450b5f62f3077ac1b941f6..1627626941a73a5c7ac561584356e2079af6a613 100755
--- a/bin/beeline
+++ b/bin/beeline
@@ -23,8 +23,10 @@
 # Enter posix mode for bash
 set -o posix
 
-# Figure out where Spark is installed
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+# Figure out if SPARK_HOME is set
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 CLASS="org.apache.hive.beeline.BeeLine"
-exec "$FWDIR/bin/spark-class" $CLASS "$@"
+exec "${SPARK_HOME}/bin/spark-class" $CLASS "$@"
diff --git a/bin/load-spark-env.sh b/bin/load-spark-env.sh
index 95779e9ddbb1881c444f3c7c18591e07577a63ff..eaea964ed5b3d5e1a79f4eb204916cf72e80f105 100644
--- a/bin/load-spark-env.sh
+++ b/bin/load-spark-env.sh
@@ -20,13 +20,17 @@
 # This script loads spark-env.sh if it exists, and ensures it is only loaded once.
 # spark-env.sh is loaded from SPARK_CONF_DIR if set, or within the current directory's
 # conf/ subdirectory.
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+
+# Figure out where Spark is installed
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 if [ -z "$SPARK_ENV_LOADED" ]; then
   export SPARK_ENV_LOADED=1
 
   # Returns the parent of the directory this script lives in.
-  parent_dir="$(cd "`dirname "$0"`"/..; pwd)"
+  parent_dir="${SPARK_HOME}"
 
   user_conf_dir="${SPARK_CONF_DIR:-"$parent_dir"/conf}"
 
@@ -42,18 +46,18 @@ fi
 
 if [ -z "$SPARK_SCALA_VERSION" ]; then
 
-    ASSEMBLY_DIR2="$FWDIR/assembly/target/scala-2.11"
-    ASSEMBLY_DIR1="$FWDIR/assembly/target/scala-2.10"
+  ASSEMBLY_DIR2="${SPARK_HOME}/assembly/target/scala-2.11"
+  ASSEMBLY_DIR1="${SPARK_HOME}/assembly/target/scala-2.10"
 
-    if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
-        echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
-        echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
-        exit 1
-    fi
+  if [[ -d "$ASSEMBLY_DIR2" && -d "$ASSEMBLY_DIR1" ]]; then
+    echo -e "Presence of build for both scala versions(SCALA 2.10 and SCALA 2.11) detected." 1>&2
+    echo -e 'Either clean one of them or, export SPARK_SCALA_VERSION=2.11 in spark-env.sh.' 1>&2
+    exit 1
+  fi
 
-    if [ -d "$ASSEMBLY_DIR2" ]; then
-        export SPARK_SCALA_VERSION="2.11"
-    else
-        export SPARK_SCALA_VERSION="2.10"
-    fi
+  if [ -d "$ASSEMBLY_DIR2" ]; then
+    export SPARK_SCALA_VERSION="2.11"
+  else
+    export SPARK_SCALA_VERSION="2.10"
+  fi
 fi
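
Note: load-spark-env.sh now anchors both spark-env.sh discovery and Scala version detection on SPARK_HOME. When assemblies for both Scala versions are present, the script exits instead of guessing; the escape hatch, as the error text above says, is pinning the version in spark-env.sh. A sketch of that override (the value 2.11 mirrors the example in the message):

    # conf/spark-env.sh
    export SPARK_SCALA_VERSION=2.11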
diff --git a/bin/pyspark b/bin/pyspark
index 18012ee4a0b4f0aca51ae5ba1a8034d51b1dbd0d..5eaa17d3c2016d602950676017e02a423bd0dac2 100755
--- a/bin/pyspark
+++ b/bin/pyspark
@@ -17,9 +17,11 @@
 # limitations under the License.
 #
 
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-source "$SPARK_HOME"/bin/load-spark-env.sh
+source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/pyspark [options]"
 
 # In Spark <= 1.1, setting IPYTHON=1 would cause the driver to be launched using the `ipython`
@@ -64,12 +66,12 @@ fi
 export PYSPARK_PYTHON
 
 # Add the PySpark classes to the Python path:
-export PYTHONPATH="$SPARK_HOME/python/:$PYTHONPATH"
-export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
 
 # Load the PySpark shell.py script when ./pyspark is used interactively:
 export OLD_PYTHONSTARTUP="$PYTHONSTARTUP"
-export PYTHONSTARTUP="$SPARK_HOME/python/pyspark/shell.py"
+export PYTHONSTARTUP="${SPARK_HOME}/python/pyspark/shell.py"
 
 # For pyspark tests
 if [[ -n "$SPARK_TESTING" ]]; then
@@ -82,4 +84,4 @@ fi
 
 export PYSPARK_DRIVER_PYTHON
 export PYSPARK_DRIVER_PYTHON_OPTS
-exec "$SPARK_HOME"/bin/spark-submit pyspark-shell-main --name "PySparkShell" "$@"
+exec "${SPARK_HOME}"/bin/spark-submit pyspark-shell-main --name "PySparkShell" "$@"
diff --git a/bin/run-example b/bin/run-example
index 798e2caeb88cedf879ba14a90f21afe62e1583af..e1b0d5789bed62b6a8426d2c2106bebc07544c75 100755
--- a/bin/run-example
+++ b/bin/run-example
@@ -17,11 +17,13 @@
 # limitations under the License.
 #
 
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
-export SPARK_HOME="$FWDIR"
-EXAMPLES_DIR="$FWDIR"/examples
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+EXAMPLES_DIR="${SPARK_HOME}"/examples
 
-. "$FWDIR"/bin/load-spark-env.sh
+. "${SPARK_HOME}"/bin/load-spark-env.sh
 
 if [ -n "$1" ]; then
   EXAMPLE_CLASS="$1"
@@ -34,8 +36,8 @@ else
   exit 1
 fi
 
-if [ -f "$FWDIR/RELEASE" ]; then
-  JAR_PATH="${FWDIR}/lib"
+if [ -f "${SPARK_HOME}/RELEASE" ]; then
+  JAR_PATH="${SPARK_HOME}/lib"
 else
   JAR_PATH="${EXAMPLES_DIR}/target/scala-${SPARK_SCALA_VERSION}"
 fi
@@ -44,7 +46,7 @@ JAR_COUNT=0
 
 for f in "${JAR_PATH}"/spark-examples-*hadoop*.jar; do
   if [[ ! -e "$f" ]]; then
-    echo "Failed to find Spark examples assembly in $FWDIR/lib or $FWDIR/examples/target" 1>&2
+    echo "Failed to find Spark examples assembly in ${SPARK_HOME}/lib or ${SPARK_HOME}/examples/target" 1>&2
     echo "You need to build Spark before running this program" 1>&2
     exit 1
   fi
@@ -67,7 +69,7 @@ if [[ ! $EXAMPLE_CLASS == org.apache.spark.examples* ]]; then
   EXAMPLE_CLASS="org.apache.spark.examples.$EXAMPLE_CLASS"
 fi
 
-exec "$FWDIR"/bin/spark-submit \
+exec "${SPARK_HOME}"/bin/spark-submit \
   --master $EXAMPLE_MASTER \
   --class $EXAMPLE_CLASS \
   "$SPARK_EXAMPLES_JAR" \
diff --git a/bin/spark-class b/bin/spark-class
index 8cae6ccbabe7c1b4b4a5698414a0d68f6942d62d..87d06693af4fe60d17f80fce3be7640643d38268 100755
--- a/bin/spark-class
+++ b/bin/spark-class
@@ -17,10 +17,11 @@
 # limitations under the License.
 #
 
-# Figure out where Spark is installed
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$SPARK_HOME"/bin/load-spark-env.sh
+. "${SPARK_HOME}"/bin/load-spark-env.sh
 
 # Find the java binary
 if [ -n "${JAVA_HOME}" ]; then
@@ -36,10 +37,10 @@ fi
 
 # Find assembly jar
 SPARK_ASSEMBLY_JAR=
-if [ -f "$SPARK_HOME/RELEASE" ]; then
-  ASSEMBLY_DIR="$SPARK_HOME/lib"
+if [ -f "${SPARK_HOME}/RELEASE" ]; then
+  ASSEMBLY_DIR="${SPARK_HOME}/lib"
 else
-  ASSEMBLY_DIR="$SPARK_HOME/assembly/target/scala-$SPARK_SCALA_VERSION"
+  ASSEMBLY_DIR="${SPARK_HOME}/assembly/target/scala-$SPARK_SCALA_VERSION"
 fi
 
 GREP_OPTIONS=
@@ -65,7 +66,7 @@ LAUNCH_CLASSPATH="$SPARK_ASSEMBLY_JAR"
 
 # Add the launcher build dir to the classpath if requested.
 if [ -n "$SPARK_PREPEND_CLASSES" ]; then
-  LAUNCH_CLASSPATH="$SPARK_HOME/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
+  LAUNCH_CLASSPATH="${SPARK_HOME}/launcher/target/scala-$SPARK_SCALA_VERSION/classes:$LAUNCH_CLASSPATH"
 fi
 
 export _SPARK_ASSEMBLY="$SPARK_ASSEMBLY_JAR"
diff --git a/bin/spark-shell b/bin/spark-shell
index 00ab7afd118b5f1824fbdc4a3d2e2b08322e9c97..6583b5bd880eebe98acdf7035a8262cc3a179871 100755
--- a/bin/spark-shell
+++ b/bin/spark-shell
@@ -28,7 +28,10 @@ esac
 # Enter posix mode for bash
 set -o posix
 
-export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
 export _SPARK_CMD_USAGE="Usage: ./bin/spark-shell [options]"
 
 # SPARK-4161: scala does not assume use of the java classpath,
@@ -47,11 +50,11 @@ function main() {
     # (see https://github.com/sbt/sbt/issues/562).
     stty -icanon min 1 -echo > /dev/null 2>&1
     export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS -Djline.terminal=unix"
-    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
+    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
     stty icanon echo > /dev/null 2>&1
   else
     export SPARK_SUBMIT_OPTS
-    "$FWDIR"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
+    "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.repl.Main --name "Spark shell" "$@"
   fi
 }
 
diff --git a/bin/spark-sql b/bin/spark-sql
index 4ea7bc6e39c07b1bf50fbf69cb416c7ce0227752..970d12cbf51dd3bfb7595e1ea5c59c596920fe2a 100755
--- a/bin/spark-sql
+++ b/bin/spark-sql
@@ -17,6 +17,9 @@
 # limitations under the License.
 #
 
-export FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
 export _SPARK_CMD_USAGE="Usage: ./bin/spark-sql [options] [cli option]"
-exec "$FWDIR"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
+exec "${SPARK_HOME}"/bin/spark-submit --class org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver "$@"
diff --git a/bin/spark-submit b/bin/spark-submit
index 255378b0f077c938e3aa0fb810e684b7fc7bea53..023f9c162f4b8e94449715c7f07bd1185af865fd 100755
--- a/bin/spark-submit
+++ b/bin/spark-submit
@@ -17,9 +17,11 @@
 # limitations under the License.
 #
 
-SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # disable randomized hash for string in Python 3.3+
 export PYTHONHASHSEED=0
 
-exec "$SPARK_HOME"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
+exec "${SPARK_HOME}"/bin/spark-class org.apache.spark.deploy.SparkSubmit "$@"
diff --git a/bin/sparkR b/bin/sparkR
index 464c29f369424b7c1842e46095436d8c3d574426..2c07a82e2173bbcdd987f768a6e76ee57aad35df 100755
--- a/bin/sparkR
+++ b/bin/sparkR
@@ -17,7 +17,10 @@
 # limitations under the License.
 #
 
-export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
-source "$SPARK_HOME"/bin/load-spark-env.sh
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
+
+source "${SPARK_HOME}"/bin/load-spark-env.sh
 export _SPARK_CMD_USAGE="Usage: ./bin/sparkR [options]"
-exec "$SPARK_HOME"/bin/spark-submit sparkr-shell-main "$@"
+exec "${SPARK_HOME}"/bin/spark-submit sparkr-shell-main "$@"
diff --git a/sbin/slaves.sh b/sbin/slaves.sh
index cdad47ee2e5948b24b6d3d487d99b7f4f5884382..c971aa3296b093050e8e49ccc9477b874cf56ba8 100755
--- a/sbin/slaves.sh
+++ b/sbin/slaves.sh
@@ -36,10 +36,11 @@ if [ $# -le 0 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # If the slaves file is specified in the command line,
 # then it takes precedence over the definition in
@@ -65,7 +66,7 @@ then
   shift
 fi
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$HOSTLIST" = "" ]; then
   if [ "$SPARK_SLAVES" = "" ]; then
diff --git a/sbin/spark-config.sh b/sbin/spark-config.sh
index e6bf544c14799615b3d9e1aa60ce3d56a5951eee..d8d9d00d64ebcf4ece96ecf7b0b93ad6c742a6eb 100755
--- a/sbin/spark-config.sh
+++ b/sbin/spark-config.sh
@@ -19,21 +19,12 @@
 # should not be executable directly
 # also should not be passed any arguments, since we need original $*
 
-# resolve links - $0 may be a softlink
-this="${BASH_SOURCE:-$0}"
-common_bin="$(cd -P -- "$(dirname -- "$this")" && pwd -P)"
-script="$(basename -- "$this")"
-this="$common_bin/$script"
+# Symlink and absolute path resolution should rely on SPARK_HOME.
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-# convert relative path to absolute path
-config_bin="`dirname "$this"`"
-script="`basename "$this"`"
-config_bin="`cd "$config_bin"; pwd`"
-this="$config_bin/$script"
-
-export SPARK_PREFIX="`dirname "$this"`"/..
-export SPARK_HOME="${SPARK_PREFIX}"
-export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"$SPARK_HOME/conf"}"
+export SPARK_CONF_DIR="${SPARK_CONF_DIR:-"${SPARK_HOME}/conf"}"
 # Add the PySpark classes to the PYTHONPATH:
-export PYTHONPATH="$SPARK_HOME/python:$PYTHONPATH"
-export PYTHONPATH="$SPARK_HOME/python/lib/py4j-0.9-src.zip:$PYTHONPATH"
+export PYTHONPATH="${SPARK_HOME}/python:${PYTHONPATH}"
+export PYTHONPATH="${SPARK_HOME}/python/lib/py4j-0.9-src.zip:${PYTHONPATH}"
diff --git a/sbin/spark-daemon.sh b/sbin/spark-daemon.sh
index 0fbe795822fbf8d111890362db5d790042a03557..6ab57df409529453d04401709847e59e3ec11d6f 100755
--- a/sbin/spark-daemon.sh
+++ b/sbin/spark-daemon.sh
@@ -37,10 +37,11 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # get arguments
 
@@ -86,7 +87,7 @@ spark_rotate_log ()
     fi
 }
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_IDENT_STRING" = "" ]; then
   export SPARK_IDENT_STRING="$USER"
@@ -97,7 +98,7 @@ export SPARK_PRINT_LAUNCH_COMMAND="1"
 
 # get log directory
 if [ "$SPARK_LOG_DIR" = "" ]; then
-  export SPARK_LOG_DIR="$SPARK_HOME/logs"
+  export SPARK_LOG_DIR="${SPARK_HOME}/logs"
 fi
 mkdir -p "$SPARK_LOG_DIR"
 touch "$SPARK_LOG_DIR"/.spark_test > /dev/null 2>&1
@@ -137,7 +138,7 @@ run_command() {
 
   if [ "$SPARK_MASTER" != "" ]; then
     echo rsync from "$SPARK_MASTER"
-    rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "$SPARK_HOME"
+    rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' "$SPARK_MASTER/" "${SPARK_HOME}"
   fi
 
   spark_rotate_log "$log"
@@ -145,12 +146,12 @@ run_command() {
 
   case "$mode" in
     (class)
-      nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
+      nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-class $command "$@" >> "$log" 2>&1 < /dev/null &
       newpid="$!"
       ;;
 
     (submit)
-      nohup nice -n "$SPARK_NICENESS" "$SPARK_PREFIX"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
+      nohup nice -n "$SPARK_NICENESS" "${SPARK_HOME}"/bin/spark-submit --class $command "$@" >> "$log" 2>&1 < /dev/null &
       newpid="$!"
       ;;
 
@@ -205,13 +206,13 @@ case $option in
       else
         echo $pid file is present but $command not running
         exit 1
-      fi  
+      fi
     else
       echo $command not running.
       exit 2
-    fi  
+    fi
     ;;
-  
+
   (*)
     echo $usage
     exit 1
diff --git a/sbin/spark-daemons.sh b/sbin/spark-daemons.sh
index 5d9f2bb51cae0547b053967fd5521a6ecb53ca04..dec2f4432df39ca7da223bb7d42aceb785f4f928 100755
--- a/sbin/spark-daemons.sh
+++ b/sbin/spark-daemons.sh
@@ -27,9 +27,10 @@ if [ $# -le 1 ]; then
   exit 1
 fi
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/spark-daemon.sh" "$@"
+exec "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/spark-daemon.sh" "$@"
diff --git a/sbin/start-all.sh b/sbin/start-all.sh
index 1baf57cea09eedc94f0b3467611ceee1a9395324..6217f9bf28e3de2768001992cacf36f179ab97f9 100755
--- a/sbin/start-all.sh
+++ b/sbin/start-all.sh
@@ -21,8 +21,9 @@
 # Starts the master on this node.
 # Starts a worker on each node specified in conf/slaves
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 TACHYON_STR=""
 
@@ -36,10 +37,10 @@ shift
 done
 
 # Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # Start Master
-"$sbin"/start-master.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-master.sh $TACHYON_STR
 
 # Start Workers
-"$sbin"/start-slaves.sh $TACHYON_STR
+"${SPARK_HOME}/sbin"/start-slaves.sh $TACHYON_STR
diff --git a/sbin/start-history-server.sh b/sbin/start-history-server.sh
index 9034e5715cc8534d392b10473bdc84fd3956663d..6851d99b7e8f44f36e22f50a4a0c953d2039db16 100755
--- a/sbin/start-history-server.sh
+++ b/sbin/start-history-server.sh
@@ -24,10 +24,11 @@
 # Use the SPARK_HISTORY_OPTS environment variable to set history server configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.history.HistoryServer 1 $@
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index a7f5d5702fd804e3581edf0e7ff116d171185c28..c20e19a8412df8192a6446a0e220708f253db135 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -19,8 +19,9 @@
 
 # Starts the master on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 ORIGINAL_ARGS="$@"
 
@@ -39,9 +40,9 @@ case $1 in
 shift
 done
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_MASTER_PORT" = "" ]; then
   SPARK_MASTER_PORT=7077
@@ -55,12 +56,12 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
   SPARK_MASTER_WEBUI_PORT=8080
 fi
 
-"$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 \
   --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT \
   $ORIGINAL_ARGS
 
 if [ "$START_TACHYON" == "true" ]; then
-  "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
-  "$sbin"/../tachyon/bin/tachyon format -s
-  "$sbin"/../tachyon/bin/tachyon-start.sh master
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon format -s
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon-start.sh master
 fi
diff --git a/sbin/start-mesos-dispatcher.sh b/sbin/start-mesos-dispatcher.sh
index ef1fc573d5c65ebe4bd579c4c2c244e2bb376592..4777e1668c7031f46f90033ff87a1793451050ad 100755
--- a/sbin/start-mesos-dispatcher.sh
+++ b/sbin/start-mesos-dispatcher.sh
@@ -21,12 +21,13 @@
 # Rest server to handle driver requests for Mesos cluster mode.
 # Only one cluster dispatcher is needed per Mesos cluster.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_MESOS_DISPATCHER_PORT" = "" ]; then
   SPARK_MESOS_DISPATCHER_PORT=7077
@@ -37,4 +38,4 @@ if [ "$SPARK_MESOS_DISPATCHER_HOST" = "" ]; then
 fi
 
 
-"$sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
+"${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosClusterDispatcher 1 --host $SPARK_MESOS_DISPATCHER_HOST --port $SPARK_MESOS_DISPATCHER_PORT "$@"
diff --git a/sbin/start-mesos-shuffle-service.sh b/sbin/start-mesos-shuffle-service.sh
index 64580762c5dc471dd13db63a4e661273c27190a9..1845845676029f0571602822d49d19e2b8b8f5cc 100755
--- a/sbin/start-mesos-shuffle-service.sh
+++ b/sbin/start-mesos-shuffle-service.sh
@@ -26,10 +26,11 @@
 # Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle service configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/sbin/start-shuffle-service.sh b/sbin/start-shuffle-service.sh
index 4fddcf7f95d40d5894e5114be630c94a699cc29f..793e165be6c78c15be19f1a48590c994ce186696 100755
--- a/sbin/start-shuffle-service.sh
+++ b/sbin/start-shuffle-service.sh
@@ -24,10 +24,11 @@
 # Use the SPARK_SHUFFLE_OPTS environment variable to set shuffle server configuration.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
-exec "$sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
+exec "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/sbin/start-slave.sh b/sbin/start-slave.sh
index 4c919ff76a8f54546195be14b24754b1b019bf02..21455648d1c6d0318e1c530b868e73f80db5d861 100755
--- a/sbin/start-slave.sh
+++ b/sbin/start-slave.sh
@@ -21,14 +21,14 @@
 #
 # Environment Variables
 #
-#   SPARK_WORKER_INSTANCES  The number of worker instances to run on this 
+#   SPARK_WORKER_INSTANCES  The number of worker instances to run on this
 #                           slave.  Default is 1.
-#   SPARK_WORKER_PORT       The base port number for the first worker. If set, 
+#   SPARK_WORKER_PORT       The base port number for the first worker. If set,
 #                           subsequent workers will increment this number.  If
 #                           unset, Spark will find a valid port number, but
 #                           with no guarantee of a predictable pattern.
 #   SPARK_WORKER_WEBUI_PORT The base port for the web interface of the first
-#                           worker.  Subsequent workers will increment this 
+#                           worker.  Subsequent workers will increment this
 #                           number.  Default is 8081.
 
 usage="Usage: start-slave.sh <spark-master-URL> where <spark-master-URL> is like spark://localhost:7077"
@@ -39,12 +39,13 @@ if [ $# -lt 1 ]; then
   exit 1
 fi
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # First argument should be the master; we need to store it aside because we may
 # need to insert arguments between it and the other arguments
@@ -71,7 +72,7 @@ function start_instance {
   fi
   WEBUI_PORT=$(( $SPARK_WORKER_WEBUI_PORT + $WORKER_NUM - 1 ))
 
-  "$sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker $WORKER_NUM \
+  "${SPARK_HOME}/sbin"/spark-daemon.sh start org.apache.spark.deploy.worker.Worker $WORKER_NUM \
      --webui-port "$WEBUI_PORT" $PORT_FLAG $PORT_NUM $MASTER "$@"
 }
 
@@ -82,4 +83,3 @@ else
     start_instance $(( 1 + $i )) "$@"
   done
 fi
-
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index 24d6268815ed3edc469d0a797f7579c8c5b21508..51ca81e053b707258c63614767659b69e01b7d73 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -19,16 +19,16 @@
 
 # Starts a slave instance on each machine specified in the conf/slaves file.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
-
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 START_TACHYON=false
 
 while (( "$#" )); do
 case $1 in
     --with-tachyon)
-      if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+      if [ ! -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
         echo "Error: --with-tachyon specified, but tachyon not found."
         exit -1
       fi
@@ -38,9 +38,8 @@ case $1 in
 shift
 done
 
-. "$sbin/spark-config.sh"
-
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # Find the port number for the master
 if [ "$SPARK_MASTER_PORT" = "" ]; then
@@ -52,11 +51,11 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
 fi
 
 if [ "$START_TACHYON" == "true" ]; then
-  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
+  "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon bootstrap-conf "$SPARK_MASTER_IP"
 
   # set -t so we can call sudo
-  SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/../tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
+  SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
 fi
 
 # Launch the slaves
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin/start-slave.sh" "spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT"
diff --git a/sbin/start-thriftserver.sh b/sbin/start-thriftserver.sh
index 5b0aeb177fff310aacd34b34d3f33a8712cdf63a..ad7e7c5277eb16046f5604656a98b45aacca1ce8 100755
--- a/sbin/start-thriftserver.sh
+++ b/sbin/start-thriftserver.sh
@@ -23,8 +23,9 @@
 # Enter posix mode for bash
 set -o posix
 
-# Figure out where Spark is installed
-FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # NOTE: This exact class name is matched downstream by SparkSubmit.
 # Any changes need to be reflected there.
@@ -39,10 +40,10 @@ function usage {
   pattern+="\|======="
   pattern+="\|--help"
 
-  "$FWDIR"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
+  "${SPARK_HOME}"/bin/spark-submit --help 2>&1 | grep -v Usage 1>&2
   echo
   echo "Thrift server options:"
-  "$FWDIR"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
+  "${SPARK_HOME}"/bin/spark-class $CLASS --help 2>&1 | grep -v "$pattern" 1>&2
 }
 
 if [[ "$@" = *--help ]] || [[ "$@" = *-h ]]; then
@@ -52,4 +53,4 @@ fi
 
 export SUBMIT_USAGE_FUNCTION=usage
 
-exec "$FWDIR"/sbin/spark-daemon.sh submit $CLASS 1 "$@"
+exec "${SPARK_HOME}"/sbin/spark-daemon.sh submit $CLASS 1 "$@"
diff --git a/sbin/stop-all.sh b/sbin/stop-all.sh
index 1a9abe07db84446094fac68132c7585715c80fdc..4e476ca05cb0555aa267f01e53ce48d98afc2d16 100755
--- a/sbin/stop-all.sh
+++ b/sbin/stop-all.sh
@@ -20,23 +20,23 @@
 # Stop all spark daemons.
 # Run this on the master node.
 
-
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
 # Load the Spark configuration
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
 # Stop the slaves, then the master
-"$sbin"/stop-slaves.sh
-"$sbin"/stop-master.sh
+"${SPARK_HOME}/sbin"/stop-slaves.sh
+"${SPARK_HOME}/sbin"/stop-master.sh
 
 if [ "$1" == "--wait" ]
 then
   printf "Waiting for workers to shut down..."
   while true
   do
-    running=`$sbin/slaves.sh ps -ef | grep -v grep | grep deploy.worker.Worker`
+    running=`"${SPARK_HOME}/sbin/slaves.sh" ps -ef | grep -v grep | grep deploy.worker.Worker`
     if [ -z "$running" ]
     then
       printf "\nAll workers successfully shut down.\n"
diff --git a/sbin/stop-history-server.sh b/sbin/stop-history-server.sh
index 6e6056359510f695ca75b4c539b5465bc55c4fec..14e3af4be910acd621594823948a487bbcf73369 100755
--- a/sbin/stop-history-server.sh
+++ b/sbin/stop-history-server.sh
@@ -19,7 +19,8 @@
 
 # Stops the history server on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.history.HistoryServer 1
+"${SPARK_HOME}/sbin/spark-daemon.sh" stop org.apache.spark.deploy.history.HistoryServer 1
diff --git a/sbin/stop-master.sh b/sbin/stop-master.sh
index 729702d92191e10571491cad83d73013b1e34568..e57962bb354d93321745fbf2b244c2e27e0c35c4 100755
--- a/sbin/stop-master.sh
+++ b/sbin/stop-master.sh
@@ -19,13 +19,14 @@
 
 # Stops the master on the machine this script is executed on.
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
 
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
-  "$sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+  "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
 fi
diff --git a/sbin/stop-mesos-dispatcher.sh b/sbin/stop-mesos-dispatcher.sh
index cb65d95b5e5245258eadf84830a292fa2b759bbe..5c0b4e051db383605c185fd1327fab6fc3fc46fb 100755
--- a/sbin/stop-mesos-dispatcher.sh
+++ b/sbin/stop-mesos-dispatcher.sh
@@ -18,10 +18,11 @@
 #
 # Stop the Mesos Cluster dispatcher on the machine this script is executed on.
 
-sbin=`dirname "$0"`
-sbin=`cd "$sbin"; pwd`
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosClusterDispatcher 1
 
diff --git a/sbin/stop-mesos-shuffle-service.sh b/sbin/stop-mesos-shuffle-service.sh
index 0e965d5ec5886b3ab10080144b78696572e657da..d23cad375e1bdefef9e37b5e9145052e5d7c821d 100755
--- a/sbin/stop-mesos-shuffle-service.sh
+++ b/sbin/stop-mesos-shuffle-service.sh
@@ -19,7 +19,8 @@
 
 # Stops the Mesos external shuffle service on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.mesos.MesosExternalShuffleService 1
diff --git a/sbin/stop-shuffle-service.sh b/sbin/stop-shuffle-service.sh
index 4cb6891ae27fae87fe7d4c07a6a504088fdff1a6..50d69cf34e0a58e5854f2a614971ac5cdea6456f 100755
--- a/sbin/stop-shuffle-service.sh
+++ b/sbin/stop-shuffle-service.sh
@@ -19,7 +19,8 @@
 
 # Stops the external shuffle service on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.ExternalShuffleService 1
diff --git a/sbin/stop-slave.sh b/sbin/stop-slave.sh
index 3d1da5b254f2ac208ab75eb9d201a69fba9248d0..685bcf59b33aac64a1324a22f454687ab909981e 100755
--- a/sbin/stop-slave.sh
+++ b/sbin/stop-slave.sh
@@ -21,23 +21,24 @@
 #
 # Environment variables
 #
-#   SPARK_WORKER_INSTANCES The number of worker instances that should be 
+#   SPARK_WORKER_INSTANCES The number of worker instances that should be
 #                          running on this slave.  Default is 1.
 
 # Usage: stop-slave.sh
 #   Stops all slaves on this worker machine
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
-  "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+  "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
 else
   for ((i=0; i<$SPARK_WORKER_INSTANCES; i++)); do
-    "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
+    "${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker $(( $i + 1 ))
   done
 fi
diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh
index 54c9bd46803a90e74c6da2746c97d6dece3f14b6..63956377629d627e94fb8946ef45963b3fcee6ee 100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@ -17,16 +17,17 @@
 # limitations under the License.
 #
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-. "$sbin/spark-config.sh"
+. "${SPARK_HOME}/sbin/spark-config.sh"
 
-. "$SPARK_PREFIX/bin/load-spark-env.sh"
+. "${SPARK_HOME}/bin/load-spark-env.sh"
 
 # do before the below calls as they exec
-if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
-  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
+if [ -e "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon ]; then
+  "${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
 fi
 
-"$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/stop-slave.sh
+"${SPARK_HOME}/sbin/slaves.sh" cd "${SPARK_HOME}" \; "${SPARK_HOME}/sbin"/stop-slave.sh
diff --git a/sbin/stop-thriftserver.sh b/sbin/stop-thriftserver.sh
index 4031a00d4a6891b86002321e1a66986ad8ea1ef6..cf45058f882a0c32ed5f854d01a210ae2daa4d00 100755
--- a/sbin/stop-thriftserver.sh
+++ b/sbin/stop-thriftserver.sh
@@ -19,7 +19,8 @@
 
 # Stops the thrift server on the machine this script is executed on.
 
-sbin="`dirname "$0"`"
-sbin="`cd "$sbin"; pwd`"
+if [ -z "${SPARK_HOME}" ]; then
+  export SPARK_HOME="$(cd "`dirname "$0"`"/..; pwd)"
+fi
 
-"$sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1
+"${SPARK_HOME}/sbin"/spark-daemon.sh stop org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 1