Ctrl-D hang bug fixed!

This commit is contained in:
Prashant Sharma 2013-03-20 00:18:04 +05:30
parent 432a227320
commit d9f34e505d
2 changed files with 13 additions and 1 deletion

View file

@ -151,9 +151,20 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
finally in = saved
}
/* PRASHANT: Detecting whether a lazy val has been materialized is possible but not worth it,
 * as in most Spark shell usages it will be. In case it is not, the user will find
 * shutdown slower than the shell start-up itself.
 */
/**
 * Stops the running SparkContext (bound as `sc` in the shell) before the
 * interpreter is closed, so executors and cluster resources are released.
 *
 * The stop command is issued via the interpreter in quiet mode to avoid
 * echoing interpreter output during shutdown.
 */
def sparkCleanUp(): Unit = {
  echo("Stopping spark context.")
  // Suppress interpreter chatter while the stop command runs.
  intp.beQuietDuring {
    command("sc.stop()")
  }
}
/** Close the interpreter and set the var to null. */
def closeInterpreter() {
if (intp ne null) {
sparkCleanUp()
intp.close()
intp = null
}
@ -873,6 +884,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
if (autorun.isDefined) intp.quietRun(autorun.get)
})
addThunk(initializeSpark())
loadFiles(settings)
// it is broken on startup; go ahead and exit
@ -886,7 +898,6 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
// message to an actor.
if (isAsync) {
intp initialize initializedCallback()
addThunk(initializeSpark())
createAsyncListener() // listens for signal to run postInitialization
}
else {

View file

@ -121,6 +121,7 @@ trait SparkILoopInit {
""")
command("import spark.SparkContext._");
}
echo("... Spark context available as sc.")
}
// code to be executed only after the interpreter is initialized