From d9f34e505d88daa6e3665b40ab70dab41e277c9d Mon Sep 17 00:00:00 2001
From: Prashant Sharma <prashant.iiith@gmail.com>
Date: Wed, 20 Mar 2013 00:18:04 +0530
Subject: [PATCH] Ctrl-D hang bug fixed!

---
 repl/src/main/scala/spark/repl/SparkILoop.scala     | 13 ++++++++++++-
 repl/src/main/scala/spark/repl/SparkILoopInit.scala |  1 +
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 2f2b5b2372..1abcc8131c 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -151,9 +151,20 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     finally in = saved
   }
 
+  /* PRASHANT: Detecting if a lazy val has been materialized or not is possible but not worth it,
+   * as in most cases of spark shell usage it will be. In case it is not, the user will find
+   * shutdown slower than the shell start-up itself.
+   */
+  def sparkCleanUp(){
+    echo("Stopping spark context.")
+    intp.beQuietDuring {
+      command("sc.stop()")
+    }
+  }
   /** Close the interpreter and set the var to null. */
   def closeInterpreter() {
     if (intp ne null) {
+      sparkCleanUp()
       intp.close()
       intp = null
     }
@@ -873,6 +884,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
       if (autorun.isDefined) intp.quietRun(autorun.get)
     })
+    addThunk(initializeSpark())
 
     loadFiles(settings)
     // it is broken on startup; go ahead and exit
@@ -886,7 +898,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     // message to an actor.
     if (isAsync) {
       intp initialize initializedCallback()
-      addThunk(initializeSpark())
       createAsyncListener() // listens for signal to run postInitialization
     }
     else {
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index b275faf981..c3d7f45dc9 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -121,6 +121,7 @@ trait SparkILoopInit {
         """)
       command("import spark.SparkContext._");
     }
+   echo("... Spark context available as sc.")
   }
 
   // code to be executed only after the interpreter is initialized
-- 
GitLab