From 250bddc255b719dd9a39b6bb3f1e60574579f983 Mon Sep 17 00:00:00 2001
From: Ali Ghodsi <alig@cs.berkeley.edu>
Date: Sat, 31 Aug 2013 17:25:05 -0700
Subject: [PATCH] Don't require spark home to be set for standalone mode

---
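Note: with this patch, an application that does not configure a Spark home passes null
through to the ApplicationDescription, and the standalone worker is then expected to fall
back to its own installation path. A rough sketch of that fallback pattern follows; the
names (SparkHomeFallback, resolveSparkHome, workerSparkHome) are illustrative only, since
the worker-side handling is not part of this diff.

    // Hypothetical worker-side fallback, illustrating how a null sparkHome coming
    // from the ApplicationDescription could be resolved against the worker's own
    // installation directory. Names are illustrative, not the actual Spark API.
    object SparkHomeFallback {
      def resolveSparkHome(appSparkHome: String, workerSparkHome: String): String =
        // Option(null) is None, so a driver that did not set a Spark home
        // falls through to the worker's local installation.
        Option(appSparkHome).getOrElse(workerSparkHome)

      def main(args: Array[String]): Unit = {
        println(resolveSparkHome(null, "/opt/spark"))            // /opt/spark
        println(resolveSparkHome("/custom/spark", "/opt/spark")) // /custom/spark
      }
    }
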
 .../spark/scheduler/cluster/SparkDeploySchedulerBackend.scala  | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index 42c3b4a6cf..7ac574bdc8 100644
--- a/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -46,8 +46,7 @@ private[spark] class SparkDeploySchedulerBackend(
       StandaloneSchedulerBackend.ACTOR_NAME)
     val args = Seq(driverUrl, "{{EXECUTOR_ID}}", "{{HOSTNAME}}", "{{CORES}}")
     val command = Command("spark.executor.StandaloneExecutorBackend", args, sc.executorEnvs)
-    val sparkHome = sc.getSparkHome().getOrElse(
-      throw new IllegalArgumentException("must supply spark home for spark standalone"))
+    val sparkHome = sc.getSparkHome().getOrElse(null)
     val appDesc = new ApplicationDescription(appName, maxCores, executorMemory, command, sparkHome,
         sc.ui.appUIAddress)
 
-- 
GitLab