diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 2f2b5b237262d9872a2085fa25fc80ad30d01eb5..1abcc8131c27e2e93eec011f81e70dd4f03db8b2 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -151,9 +151,22 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     finally in = saved
   }
 
+  /* PRASHANT: Detecting whether a lazy val has been materialized is possible, but not worth it:
+   * in most Spark shell sessions sc will have been. If it has not, the user will find shutdown
+   * slower than the shell startup itself, since sc.stop() forces the context to materialize first.
+   */
+  def sparkCleanUp() {
+    echo("Stopping Spark context.")
+    intp.beQuietDuring {
+      command("sc.stop()")
+    }
+  }
   /** Close the interpreter and set the var to null. */
   def closeInterpreter() {
     if (intp ne null) {
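+      // stop the SparkContext before the interpreter is torn down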
+      sparkCleanUp()
       intp.close()
       intp = null
     }
@@ -873,6 +886,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
       if (autorun.isDefined) intp.quietRun(autorun.get)
     })
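+    // queue Spark initialization on every startup path, not just the isAsync branch below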
+    addThunk(initializeSpark())
 
     loadFiles(settings)
     // it is broken on startup; go ahead and exit
@@ -886,7 +901,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     // message to an actor.
     if (isAsync) {
       intp initialize initializedCallback()
-      addThunk(initializeSpark())
       createAsyncListener() // listens for signal to run postInitialization
     }
     else {
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index b275faf981d4f5250d63aa3062ce3a00d9e8ed4b..c3d7f45dc9aa34e0b39b990e70b6242cb098fe0d 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -121,6 +121,8 @@ trait SparkILoopInit {
         """)
       command("import spark.SparkContext._");
     }
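+    // announce the binding once the initialization commands above have run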
+   echo("... Spark context available as sc.")
   }
 
   // code to be executed only after the interpreter is initialized