diff --git a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
index 4f8e1dcb260a14d2114cc641ec4967d974cb1bcb..d7f58b2cb1bc110a94477a04d75eb23c66d27742 100644
--- a/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
+++ b/core/src/main/scala/spark/deploy/worker/ExecutorRunner.scala
@@ -1,6 +1,7 @@
 package spark.deploy.worker
 
 import java.io._
+import java.lang.System.getenv
 import spark.deploy.{ExecutorState, ExecutorStateChanged, ApplicationDescription}
 import akka.actor.ActorRef
 import spark.{Utils, Logging}
@@ -77,11 +78,7 @@ private[spark] class ExecutorRunner(
 
   def buildCommandSeq(): Seq[String] = {
     val command = appDesc.command
-    val runner = if (System.getenv("JAVA_HOME") == null) {
-      "java"
-    } else {
-      System.getenv("JAVA_HOME") + "/bin/java"
-    }
+    val runner = Option(getenv("JAVA_HOME")).map(_ + "/bin/java").getOrElse("java")
     // SPARK-698: do not call the run.cmd script, as process.destroy()
     // fails to kill a process tree on Windows
     Seq(runner) ++ buildJavaOpts() ++ Seq(command.mainClass) ++
@@ -93,18 +90,10 @@ private[spark] class ExecutorRunner(
    * the way the JAVA_OPTS are assembled there.
    */
   def buildJavaOpts(): Seq[String] = {
-    val libraryOpts = if (System.getenv("SPARK_LIBRARY_PATH") == null) {
-      Nil
-    } else {
-      List("-Djava.library.path=" + System.getenv("SPARK_LIBRARY_PATH"))
-    }
-
-    val userOpts = if (System.getenv("SPARK_JAVA_OPTS") == null) {
-      Nil
-    } else {
-      Utils.splitCommandString(System.getenv("SPARK_JAVA_OPTS"))
-    }
-
+    val libraryOpts = Option(getenv("SPARK_LIBRARY_PATH"))
+      .map(p => List("-Djava.library.path=" + p))
+      .getOrElse(Nil)
+    val userOpts = Option(getenv("SPARK_JAVA_OPTS")).map(Utils.splitCommandString).getOrElse(Nil)
     val memoryOpts = Seq("-Xms" + memory + "M", "-Xmx" + memory + "M")
 
     // Figure out our classpath with the external compute-classpath script
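
For reference, a minimal standalone sketch of the `Option(getenv(...))` idiom this diff adopts: wrapping a possibly-null environment lookup in `Option` lets absent variables fall through to a default via `getOrElse`, replacing the explicit null checks removed above. Note that `splitCommandString` below is a simplified stand-in for Spark's `Utils.splitCommandString` (which also handles quoted arguments), not the real implementation.

```scala
import java.lang.System.getenv

object EnvOptsSketch {
  // Naive whitespace split; Spark's Utils.splitCommandString additionally
  // honors quoting, so this is only an approximation for the sketch.
  def splitCommandString(s: String): Seq[String] =
    s.trim.split("\\s+").toSeq.filter(_.nonEmpty)

  def main(args: Array[String]): Unit = {
    // Option(...) turns a null env value into None, so a missing
    // JAVA_HOME falls back to the bare "java" on the PATH.
    val runner = Option(getenv("JAVA_HOME")).map(_ + "/bin/java").getOrElse("java")

    // Same pattern for an optional list-valued setting: map to the
    // populated list when present, default to Nil when absent.
    val libraryOpts = Option(getenv("SPARK_LIBRARY_PATH"))
      .map(p => List("-Djava.library.path=" + p))
      .getOrElse(Nil)

    // And for a setting that needs parsing before use.
    val userOpts = Option(getenv("SPARK_JAVA_OPTS")).map(splitCommandString).getOrElse(Nil)

    println(Seq(runner) ++ libraryOpts ++ userOpts)
  }
}
```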