diff --git a/make-distribution.sh b/make-distribution.sh
index e6b5956d1e7e25efd0c5bb49cb6e549e0dc01f63..6bc6819d8da926ee56fa9086abb92038775e17e7 100755
--- a/make-distribution.sh
+++ b/make-distribution.sh
@@ -58,6 +58,7 @@ echo "Version is ${VERSION}"
 # Initialize defaults
 SPARK_HADOOP_VERSION=1.0.4
 SPARK_YARN=false
+SPARK_TACHYON=false
 MAKE_TGZ=false
 
 # Parse arguments
@@ -70,6 +71,9 @@ while (( "$#" )); do
     --with-yarn)
       SPARK_YARN=true
       ;;
+    --with-tachyon)
+      SPARK_TACHYON=true
+      ;;
     --tgz)
       MAKE_TGZ=true
       ;;
@@ -90,6 +94,12 @@ else
   echo "YARN disabled"
 fi
 
+if [ "$SPARK_TACHYON" == "true" ]; then
+  echo "Tachyon Enabled"
+else
+  echo "Tachyon Disabled"
+fi
+
 # Build fat JAR
 export SPARK_HADOOP_VERSION
 export SPARK_YARN
@@ -113,6 +123,29 @@ cp -r "$FWDIR/python" "$DISTDIR"
 cp -r "$FWDIR/sbin" "$DISTDIR"
 
 
+# Download and copy in tachyon, if requested
+if [ "$SPARK_TACHYON" == "true" ]; then
+  TACHYON_VERSION="0.4.1"
+  TACHYON_URL="https://github.com/amplab/tachyon/releases/download/v${TACHYON_VERSION}/tachyon-${TACHYON_VERSION}-bin.tar.gz"
+
+  TMPD=`mktemp -d`
+
+  pushd "$TMPD" > /dev/null
+  echo "Fetching the Tachyon tarball"
+  wget "$TACHYON_URL"
+
+  tar xf "tachyon-${TACHYON_VERSION}-bin.tar.gz"
+  cp "tachyon-${TACHYON_VERSION}/target/tachyon-${TACHYON_VERSION}-jar-with-dependencies.jar" "$DISTDIR/jars"
+  mkdir -p "$DISTDIR/tachyon/src/main/java/tachyon/web"
+  cp -r "tachyon-${TACHYON_VERSION}"/{bin,conf,libexec} "$DISTDIR/tachyon"
+  cp -r "tachyon-${TACHYON_VERSION}"/src/main/java/tachyon/web/resources "$DISTDIR/tachyon/src/main/java/tachyon/web"
+  sed -i "s|export TACHYON_JAR=\$TACHYON_HOME/target/\(.*\)|# This is set for spark's make-distribution\n  export TACHYON_JAR=\$TACHYON_HOME/../../jars/\1|" "$DISTDIR/tachyon/libexec/tachyon-config.sh"
+
+  popd > /dev/null
+  rm -rf "$TMPD"
+fi
+
 if [ "$MAKE_TGZ" == "true" ]; then
   TARDIR="$FWDIR/spark-$VERSION"
   cp -r "$DISTDIR" "$TARDIR"
diff --git a/sbin/start-all.sh b/sbin/start-all.sh
index 2daf49db359df4d62c9b3ddee2d9787523801345..5c89ab4d86b3a0c3c9e03cf2c9bbdf779a40c349 100755
--- a/sbin/start-all.sh
+++ b/sbin/start-all.sh
@@ -24,11 +24,23 @@
 sbin=`dirname "$0"`
 sbin=`cd "$sbin"; pwd`
 
+TACHYON_STR=""
+
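+# Pass any --with-tachyon flag through to the master and slave start scripts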
+while (( "$#" )); do
+case $1 in
+    --with-tachyon)
+      TACHYON_STR="--with-tachyon"
+      ;;
+  esac
+shift
+done
+
 # Load the Spark configuration
 . "$sbin/spark-config.sh"
 
 # Start Master
-"$sbin"/start-master.sh
+"$sbin"/start-master.sh $TACHYON_STR
 
 # Start Workers
-"$sbin"/start-slaves.sh
+"$sbin"/start-slaves.sh $TACHYON_STR
diff --git a/sbin/start-master.sh b/sbin/start-master.sh
index ec3dfdb4197ec206ab734ad13bf91b2e0811ddf3..03a3428aea9f1a6a96deb8fed9f9b57bc0ef327d 100755
--- a/sbin/start-master.sh
+++ b/sbin/start-master.sh
@@ -22,6 +22,21 @@
 sbin=`dirname "$0"`
 sbin=`cd "$sbin"; pwd`
 
+START_TACHYON=false
+
+while (( "$#" )); do
+case $1 in
+    --with-tachyon)
+      if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+        echo "Error: --with-tachyon specified, but tachyon not found."
+        exit -1
+      fi
+      START_TACHYON=true
+      ;;
+  esac
+shift
+done
+
 . "$sbin/spark-config.sh"
 
 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
@@ -41,3 +56,10 @@ if [ "$SPARK_MASTER_WEBUI_PORT" = "" ]; then
 fi
 
 "$sbin"/spark-daemon.sh start org.apache.spark.deploy.master.Master 1 --ip $SPARK_MASTER_IP --port $SPARK_MASTER_PORT --webui-port $SPARK_MASTER_WEBUI_PORT
+
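+# If requested, configure and start the Tachyon master alongside the Spark master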
+if [ "$START_TACHYON" == "true" ]; then
+  "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+  "$sbin"/../tachyon/bin/tachyon format -s
+  "$sbin"/../tachyon/bin/tachyon-start.sh master
+fi
diff --git a/sbin/start-slaves.sh b/sbin/start-slaves.sh
index fd5cdeb1e6788ab0e7550621781883b31dc8f3cd..da641cfe3c6fa2312d7ea3e5cbb91d30daee3fe7 100755
--- a/sbin/start-slaves.sh
+++ b/sbin/start-slaves.sh
@@ -20,6 +20,22 @@
 sbin=`dirname "$0"`
 sbin=`cd "$sbin"; pwd`
 
+
+START_TACHYON=false
+
+while (( "$#" )); do
+case $1 in
+    --with-tachyon)
+      if [ ! -e "$sbin"/../tachyon/bin/tachyon ]; then
+        echo "Error: --with-tachyon specified, but tachyon not found."
+        exit -1
+      fi
+      START_TACHYON=true
+      ;;
+  esac
+shift
+done
+
 . "$sbin/spark-config.sh"
 
 if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
@@ -35,6 +51,14 @@ if [ "$SPARK_MASTER_IP" = "" ]; then
   SPARK_MASTER_IP=`hostname`
 fi
 
+if [ "$START_TACHYON" == "true" ]; then
+  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon bootstrap-conf $SPARK_MASTER_IP
+
+  # set -t so we can call sudo
+  SPARK_SSH_OPTS="-o StrictHostKeyChecking=no -t" "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/../tachyon/bin/tachyon-start.sh" worker SudoMount \; sleep 1
+fi
+
 # Launch the slaves
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
   exec "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin/start-slave.sh" 1 spark://$SPARK_MASTER_IP:$SPARK_MASTER_PORT
diff --git a/sbin/stop-master.sh b/sbin/stop-master.sh
index 2adabd426563c7f5bf4f67f73da10cc60e753fc2..b6bdaa4db373c96223206d07571fc96d6e76be08 100755
--- a/sbin/stop-master.sh
+++ b/sbin/stop-master.sh
@@ -25,3 +25,8 @@ sbin=`cd "$sbin"; pwd`
 . "$sbin/spark-config.sh"
 
 "$sbin"/spark-daemon.sh stop org.apache.spark.deploy.master.Master 1
+
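+# Also stop the Tachyon master if a bundled Tachyon distribution is present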
+if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
+  "$sbin"/../tachyon/bin/tachyon killAll tachyon.master.Master
+fi
diff --git a/sbin/stop-slaves.sh b/sbin/stop-slaves.sh
index eb803b4900347e833ea0be5d2b4ae11845988de9..6bf393ccd4b09b95592c7599b9325b3da6358b9c 100755
--- a/sbin/stop-slaves.sh
+++ b/sbin/stop-slaves.sh
@@ -26,6 +26,11 @@ if [ -f "${SPARK_CONF_DIR}/spark-env.sh" ]; then
   . "${SPARK_CONF_DIR}/spark-env.sh"
 fi
 
+# Do this before the calls below, since they exec and will not return
+if [ -e "$sbin"/../tachyon/bin/tachyon ]; then
+  "$sbin/slaves.sh" cd "$SPARK_HOME" \; "$sbin"/../tachyon/bin/tachyon killAll tachyon.worker.Worker
+fi
+
 if [ "$SPARK_WORKER_INSTANCES" = "" ]; then
   "$sbin"/spark-daemons.sh stop org.apache.spark.deploy.worker.Worker 1
 else