Commit d9f34e50 authored by Prashant Sharma

Ctrl-D hang bug fixed!

parent 432a2273
@@ -151,9 +151,20 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     finally in = saved
   }
+  /* PRASHANT: Detecting whether a lazy val has been materialized is possible but not worth it,
+   * as in most Spark shell usages it will have been. In case it has not, the user will find
+   * shutdown slower than the shell start-up itself.
+   */
+  def sparkCleanUp() {
+    echo("Stopping spark context.")
+    intp.beQuietDuring {
+      command("sc.stop()")
+    }
+  }
   /** Close the interpreter and set the var to null. */
   def closeInterpreter() {
     if (intp ne null) {
+      sparkCleanUp()
       intp.close()
       intp = null
     }
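To see what the new sparkCleanUp() amounts to outside the REPL internals, here is a minimal standalone sketch of the same idea against the public Spark API: stop the SparkContext before the process exits, so its threads cannot keep the JVM (and the shell) hanging after Ctrl-D. The modern org.apache.spark namespace and the app/master settings are assumptions for illustration; the diff itself targets the old spark package and runs sc.stop() through the interpreter.

```scala
// Illustrative only: the cleanup idea from sparkCleanUp(), expressed as a plain
// program rather than through intp.beQuietDuring/command(). The namespace and
// configuration here are assumptions, not taken from the diff.
import org.apache.spark.{SparkConf, SparkContext}

object ShutdownSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("shutdown-sketch").setMaster("local[*]")
    val sc   = new SparkContext(conf)

    // Register cleanup so a normal exit (e.g. the user pressing Ctrl-D in a shell)
    // stops the context instead of leaving it alive and hanging the JVM.
    sys.addShutdownHook {
      println("Stopping spark context.")
      sc.stop()
    }

    // ... interactive / driver work would go here ...
  }
}
```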
@@ -873,6 +884,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
       if (autorun.isDefined) intp.quietRun(autorun.get)
     })
+    addThunk(initializeSpark())
     loadFiles(settings)
     // it is broken on startup; go ahead and exit
@@ -886,7 +898,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
     // message to an actor.
     if (isAsync) {
       intp initialize initializedCallback()
-      addThunk(initializeSpark())
       createAsyncListener() // listens for signal to run postInitialization
     }
     else {
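These two hunks move addThunk(initializeSpark()) out of the async-only branch to just before loadFiles(settings), so Spark initialization is queued regardless of which start-up path the REPL takes. Below is a hypothetical, self-contained sketch of that deferred-initialization ("thunk") pattern; the names mirror the diff but are not the real ILoop internals.

```scala
// Hypothetical sketch of a thunk queue: work registered with addThunk() runs
// only after initialization completes, whichever start-up path was taken.
import scala.collection.mutable.ListBuffer

class DeferredInit {
  private val thunks = ListBuffer.empty[() => Unit]

  // Queue a body of work to run once the interpreter is ready.
  def addThunk(body: => Unit): Unit = thunks += (() => body)

  // Run every queued thunk exactly once, in registration order.
  def runThunks(): Unit = {
    thunks.foreach(_.apply())
    thunks.clear()
  }
}

object DeferredInitDemo extends App {
  val init = new DeferredInit
  // Registered up front (as the diff now does before loadFiles), so it runs on
  // both the synchronous and asynchronous start-up paths.
  init.addThunk(println("initializeSpark()"))
  init.addThunk(println("autorun code"))
  init.runThunks() // invoked once initialization has finished
}
```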
@@ -121,6 +121,7 @@ trait SparkILoopInit {
       """)
       command("import spark.SparkContext._");
     }
+    echo("... Spark context available as sc.")
   }

   // code to be executed only after the interpreter is initialized