From 531a7e55745ad76d17d41f80dbdea0072acce079 Mon Sep 17 00:00:00 2001
From: Charles Reiss <woggle@apache.org>
Date: Wed, 10 Jul 2013 17:59:43 -0700
Subject: [PATCH] Pass executor env vars (e.g. SPARK_CLASSPATH) to
 compute-classpath.

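Previously, ExecutorRunner launched the compute-classpath script with the
worker's own environment, so variables from the application's command
environment (such as SPARK_CLASSPATH) were not visible while the executor
classpath was being computed. Utils.executeAndGetOutput now accepts an
optional extraEnvironment map that is merged into the child process's
environment, and ExecutorRunner forwards appDesc.command.environment
through it.

A minimal sketch of a call to the extended helper (the paths and the
SPARK_CLASSPATH value below are illustrative only, not part of this patch):

    // Illustrative only: merge SPARK_CLASSPATH into the script's environment.
    val sparkHome = "/opt/spark"  // assumed install location
    val classPath = Utils.executeAndGetOutput(
      Seq(sparkHome + "/bin/compute-classpath.sh"),
      extraEnvironment = Map("SPARK_CLASSPATH" -> "/opt/extra/jars/*"))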
---
 core/src/main/scala/spark/Utils.scala                | 12 +++++++++---
 .../scala/spark/deploy/worker/ExecutorRunner.scala   |  4 +++-
 2 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/spark/Utils.scala b/core/src/main/scala/spark/Utils.scala
index 64547bbdcd..d2bf151cbf 100644
--- a/core/src/main/scala/spark/Utils.scala
+++ b/core/src/main/scala/spark/Utils.scala
@@ -6,6 +6,7 @@ import java.util.{Locale, Random, UUID}
 import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadFactory, ThreadPoolExecutor}
 import java.util.regex.Pattern
 
+import scala.collection.Map
 import scala.collection.mutable.{ArrayBuffer, HashMap}
 import scala.collection.JavaConversions._
 import scala.io.Source
@@ -545,10 +546,15 @@ private object Utils extends Logging {
   /**
    * Execute a command and get its output, throwing an exception if it yields a code other than 0.
    */
-  def executeAndGetOutput(command: Seq[String], workingDir: File = new File(".")): String = {
-    val process = new ProcessBuilder(command: _*)
+  def executeAndGetOutput(command: Seq[String], workingDir: File = new File("."),
+                          extraEnvironment: Map[String, String] = Map.empty): String = {
+    val builder = new ProcessBuilder(command: _*)
         .directory(workingDir)
-        .start()
+    val environment = builder.environment()
+    for ((key, value) <- extraEnvironment) {
+      environment.put(key, value)
+    }
+    val process = builder.start()
     new Thread("read stderr for " + command(0)) {
       override def run() {
         for (line <- Source.fromInputStream(process.getErrorStream).getLines) {
diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index d7f58b2cb1..5d3d54c65e 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -98,7 +98,9 @@ private[spark] class ExecutorRunner(
 
     // Figure out our classpath with the external compute-classpath script
     val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
-    val classPath = Utils.executeAndGetOutput(Seq(sparkHome + "/bin/compute-classpath" + ext))
+    val classPath = Utils.executeAndGetOutput(
+        Seq(sparkHome + "/bin/compute-classpath" + ext),
+        extraEnvironment=appDesc.command.environment)
 
     Seq("-cp", classPath) ++ libraryOpts ++ userOpts ++ memoryOpts
   }
-- 
GitLab